/*
 * Project: MoleCuilder
 * Description: creates and alters molecular systems
 * Copyright (C)  2010-2012 University of Bonn. All rights reserved.
 *
 *
 *   This file is part of MoleCuilder.
 *
 *    MoleCuilder is free software: you can redistribute it and/or modify
 *    it under the terms of the GNU General Public License as published by
 *    the Free Software Foundation, either version 2 of the License, or
 *    (at your option) any later version.
 *
 *    MoleCuilder is distributed in the hope that it will be useful,
 *    but WITHOUT ANY WARRANTY; without even the implied warranty of
 *    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 *    GNU General Public License for more details.
 *
 *    You should have received a copy of the GNU General Public License
 *    along with MoleCuilder.  If not, see <http://www.gnu.org/licenses/>.
 */

/*
 * FragmentationAutomationAction.cpp
 *
 *  Created on: May 18, 2012
 *      Author: heber
 */

// include config.h
#ifdef HAVE_CONFIG_H
#include <config.h>
#endif

#include <boost/archive/text_iarchive.hpp>
// boost asio needs specific operator new
#include <boost/asio.hpp>

#include "CodePatterns/MemDebug.hpp"

// include headers that implement an archive in simple text format
#include <boost/archive/text_oarchive.hpp>
#include <boost/archive/text_iarchive.hpp>


#include <boost/mpl/remove.hpp>
#include <boost/lambda/lambda.hpp>

#include <iostream>

#include "CodePatterns/Assert.hpp"
#include "CodePatterns/Info.hpp"
#include "CodePatterns/Log.hpp"
#include "JobMarket/Jobs/FragmentJob.hpp"

#include "Fragmentation/Automation/createMatrixNrLookup.hpp"
#include "Fragmentation/Automation/extractJobIds.hpp"
#include "Fragmentation/Automation/FragmentationChargeDensity.hpp"
#include "Fragmentation/Automation/FragmentationResults.hpp"
#include "Fragmentation/Automation/MPQCFragmentController.hpp"
#include "Fragmentation/Automation/parseKeySetFile.hpp"
#include "Fragmentation/Automation/VMGDebugGridFragmentController.hpp"
#include "Fragmentation/Automation/VMGFragmentController.hpp"
#include "Fragmentation/EnergyMatrix.hpp"
#include "Fragmentation/ForceMatrix.hpp"
#include "Fragmentation/Fragmentation.hpp"
#include "Fragmentation/HydrogenSaturation_enum.hpp"
#include "Fragmentation/Homology/HomologyContainer.hpp"
#include "Fragmentation/Homology/HomologyGraph.hpp"
#include "Fragmentation/KeySet.hpp"
#include "Fragmentation/KeySetsContainer.hpp"
#include "Fragmentation/SetValues/Fragment.hpp"
#include "Fragmentation/SetValues/Histogram.hpp"
#include "Fragmentation/SetValues/IndexedVectors.hpp"
#include "Fragmentation/Summation/IndexSetContainer.hpp"
#include "Fragmentation/Summation/writeTable.hpp"
#include "Graph/DepthFirstSearchAnalysis.hpp"
#include "Jobs/MPQCJob.hpp"
#include "Jobs/MPQCData.hpp"
#include "Jobs/MPQCData_printKeyNames.hpp"
#ifdef HAVE_VMG
#include "Jobs/VMGDebugGridJob.hpp"
#include "Jobs/VMGJob.hpp"
#include "Jobs/VMGData.hpp"
#include "Jobs/VMGDataFused.hpp"
#include "Jobs/VMGDataMap.hpp"
#include "Jobs/VMGData_printKeyNames.hpp"
#endif
#include "World.hpp"

#include <fstream>
#include <iostream>
#include <string>
#include <vector>

#include <boost/mpl/for_each.hpp>

#include "Actions/FragmentationAction/FragmentationAutomationAction.hpp"

using namespace MoleCuilder;

// and construct the stuff
#include "FragmentationAutomationAction.def"
#include "Action_impl_pre.hpp"
/** =========== define the function ====================== */

class controller_AddOn;

// needs to be defined for using the FragmentController
controller_AddOn *getAddOn()
{
  return NULL;
}

/** Helper function to obtain the number of atoms in the system.
 *
 * Here, we just count the lines in the adjacency file, as their number
 * should correspond to the number of atoms, except when some atoms
 * are not bonded; but then fragmentation makes no sense anyway.
 *
 * @param path path to the adjacency file
 * @return number of atoms, or 0 if the adjacency file could not be opened
 */
size_t getNoAtomsFromAdjacencyFile(const std::string &path)
{
  size_t NoAtoms = 0;

  // parse in special file to get atom count (from line count)
  std::string filename(path);
  filename += FRAGMENTPREFIX;
  filename += ADJACENCYFILE;
  std::ifstream adjacency(filename.c_str());
  if (adjacency.fail()) {
    LOG(0, endl << "getNoAtomsFromAdjacencyFile() - Unable to open " << filename << ", is the directory correct?");
    return 0;
  }
  std::string buffer;
  while (getline(adjacency, buffer))
    NoAtoms++;
  LOG(1, "INFO: There are " << NoAtoms << " atoms.");

  return NoAtoms;
}


/** Place results from FragmentResult into EnergyMatrix and ForceMatrix.
 *
 * @param fragmentData MPQCData resulting from the jobs
 * @param MatrixNrLookup lookup map from job id to fragment number
 * @param FragmentCounter total number of fragments
 * @param NoAtoms total number of atoms
 * @param Energy energy matrix to be filled on return
 * @param Force force matrix to be filled on return
 * @return true - everything ok, false - else
 */
bool putResultsintoMatrices(
    const std::map<JobId_t, MPQCData> &fragmentData,
    const std::map< JobId_t, size_t > &MatrixNrLookup,
    const size_t FragmentCounter,
    const size_t NoAtoms,
    EnergyMatrix &Energy,
    ForceMatrix &Force)
{
  for (std::map<JobId_t, MPQCData>::const_iterator dataiter = fragmentData.begin();
      dataiter != fragmentData.end(); ++dataiter) {
    const MPQCData &extractedData = dataiter->second;
    const JobId_t &jobid = dataiter->first;
    std::map< JobId_t, size_t >::const_iterator nriter = MatrixNrLookup.find(jobid);
    ASSERT( nriter != MatrixNrLookup.end(),
        "putResultsintoMatrices() - MatrixNrLookup does not contain id "
        +toString(jobid)+".");
    // place results into EnergyMatrix ...
    {
      MatrixContainer::MatrixArray matrix;
      matrix.resize(1);
      matrix[0].resize(1, extractedData.energies.total);
      if (!Energy.AddMatrix(
          std::string("MPQCJob ")+toString(jobid),
          matrix,
          nriter->second)) {
        ELOG(1, "Adding energy matrix failed.");
        return false;
      }
    }
    // ... and ForceMatrix (with two empty columns in front)
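    // Each row assembled below has the layout [ 0., 0., f_1, ..., f_NDIM ]:
    // two leading zero columns (left empty here, cf. the commented-out loop)
    // followed by the force components of the respective atom.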
    {
      MatrixContainer::MatrixArray matrix;
      const size_t rows = extractedData.forces.size();
      matrix.resize(rows);
      for (size_t i=0;i<rows;++i) {
        const size_t columns = 2+extractedData.forces[i].size();
        matrix[i].resize(columns, 0.);
  //      for (size_t j=0;j<2;++j)
  //        matrix[i][j] = 0.;
        for (size_t j=2;j<columns;++j)
          matrix[i][j] = extractedData.forces[i][j-2];
      }
      if (!Force.AddMatrix(
          std::string("MPQCJob ")+toString(jobid),
          matrix,
          nriter->second)) {
        ELOG(1, "Adding force matrix failed.");
        return false;
      }
    }
  }
  // add one more "total" matrix to each container:
  // for the energy a single zero entry suffices ...
  MatrixContainer::MatrixArray matrix;
  matrix.resize(1);
  matrix[0].resize(1, 0.);
  if (!Energy.AddMatrix(std::string("MPQCJob total"), matrix, FragmentCounter))
    return false;
  // ... but the force total needs one row per atom, hence NoAtoms rows
  matrix.resize(NoAtoms);
  for (size_t i = 0; i< NoAtoms; ++i)
    matrix[i].resize(2+NDIM, 0.);
  if (!Force.AddMatrix(std::string("MPQCJob total"), matrix, FragmentCounter))
    return false;

  return true;
}

/** Print MPQCData from received results.
 *
 * @param fragmentData MPQCData resulting from the jobs, associated to job id
 * @param KeySetFilename filename with keysets to associate forces correctly
 * @param NoAtoms total number of atoms
 * @return true - everything ok, false - else
 */
bool printReceivedMPQCResults(
    const std::map<JobId_t, MPQCData> &fragmentData,
    const std::string &KeySetFilename,
    size_t NoAtoms)
{
  // create a vector of all job ids
  std::vector<JobId_t> jobids;
  std::transform(fragmentData.begin(),fragmentData.end(),
      std::back_inserter(jobids),
      boost::bind( &std::map<JobId_t,MPQCData>::value_type::first, boost::lambda::_1 )
  );

  // create lookup from job nr to fragment number
  size_t FragmentCounter = 0;
  const std::map< JobId_t, size_t > MatrixNrLookup=
      createMatrixNrLookup(jobids, FragmentCounter);

  // place results into maps
  EnergyMatrix Energy;
  ForceMatrix Force;
  if (!putResultsintoMatrices(fragmentData, MatrixNrLookup, FragmentCounter, NoAtoms, Energy, Force))
    return false;

  if (!Energy.InitialiseIndices()) return false;

  if (!Force.ParseIndices(KeySetFilename.c_str())) return false;

  // initialise keysets
  KeySetsContainer KeySet;
  parseKeySetFile(KeySet, KeySetFilename, FragmentCounter, NonHydrogenKeySets);
  KeySetsContainer ForceKeySet;
  parseKeySetFile(ForceKeySet, KeySetFilename, FragmentCounter, HydrogenKeySets);

  // combine all found data
  if (!KeySet.ParseManyBodyTerms()) return false;

  EnergyMatrix EnergyFragments;
  ForceMatrix ForceFragments;
  if (!EnergyFragments.AllocateMatrix(Energy.Header, Energy.MatrixCounter, Energy.RowCounter, Energy.ColumnCounter)) return false;
  if (!ForceFragments.AllocateMatrix(Force.Header, Force.MatrixCounter, Force.RowCounter, Force.ColumnCounter)) return false;

  if(!Energy.SetLastMatrix(0., 0)) return false;
  if(!Force.SetLastMatrix(0., 2)) return false;

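  // Sum up the fragment contributions order by order (many-body/bond-order
  // expansion); the accumulated totals presumably end up in the extra
  // "total" matrix at index FragmentCounter that was appended above.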
  for (int BondOrder=0;BondOrder<KeySet.Order;BondOrder++) {
    // --------- sum up energy --------------------
    LOG(1, "INFO: Summing energy of order " << BondOrder+1 << " ...");
    if (!EnergyFragments.SumSubManyBodyTerms(Energy, KeySet, BondOrder)) return false;
    if (!Energy.SumSubEnergy(EnergyFragments, NULL, KeySet, BondOrder, 1.)) return false;

    // --------- sum up Forces --------------------
    LOG(1, "INFO: Summing forces of order " << BondOrder+1 << " ...");
    if (!ForceFragments.SumSubManyBodyTerms(Force, KeySet, BondOrder)) return false;
    if (!Force.SumSubForces(ForceFragments, KeySet, BondOrder, 1.)) return false;
  }

  // for debugging print resulting energy and forces
  LOG(1, "INFO: Resulting energy is " << Energy.Matrix[ FragmentCounter ][0][0]);
  std::stringstream output;
  for (int i=0; i< Force.RowCounter[FragmentCounter]; ++i) {
    for (int j=0; j< Force.ColumnCounter[FragmentCounter]; ++j)
      output << Force.Matrix[ FragmentCounter ][i][j] << " ";
    output << "\n";
  }
  LOG(1, "INFO: Resulting forces are " << std::endl << output.str());

  return true;
}

void writeToFile(const std::string &filename, const std::string contents)
{
  std::ofstream tablefile(filename.c_str());
  tablefile << contents;
  tablefile.close();
}

/** Print the summed-up results as tables and write them to data files.
 *
 * @param results summed up results container
 */
void printReceivedFullResults(
    const FragmentationResults &results)
{
  // print tables (without eigenvalues, they go extra)
  {
    typedef boost::mpl::remove<MPQCDataEnergyVector_t, MPQCDataFused::energy_eigenvalues>::type
      MPQCDataEnergyVector_noeigenvalues_t;
    const std::string energyresult =
        writeTable<MPQCDataEnergyMap_t, MPQCDataEnergyVector_noeigenvalues_t >()(
            results.Result_Energy_fused, results.getMaxLevel());
    LOG(0, "Energy table is \n" << energyresult);
    std::string filename;
    filename += FRAGMENTPREFIX + std::string("_Energy.dat");
    writeToFile(filename, energyresult);
  }

  {
    const std::string gridresult =
        writeTable<VMGDataMap_t, VMGDataVector_t >()(
            results.Result_LongRange_fused, results.getMaxLevel(), 2);
    LOG(0, "VMG table is \n" << gridresult);
    std::string filename;
    filename += FRAGMENTPREFIX + std::string("_VMGEnergy.dat");
    writeToFile(filename, gridresult);
  }

  {
    const std::string gridresult =
        writeTable<VMGDataLongRangeMap_t, VMGDataLongRangeVector_t >()(
            results.Result_LongRangeIntegrated_fused, results.getMaxLevel(), 2);
    LOG(0, "LongRange table is \n" << gridresult);
    std::string filename;
    filename += FRAGMENTPREFIX + std::string("_LongRangeEnergy.dat");
    writeToFile(filename, gridresult);
  }

  {
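    // NOTE: eigenvalueresult is left empty below, i.e. for the eigenvalues
    // only an empty placeholder table is logged and written to file so far.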
    const std::string eigenvalueresult;
    LOG(0, "Eigenvalue table is \n" << eigenvalueresult);
    std::string filename;
    filename += FRAGMENTPREFIX + std::string("_Eigenvalues.dat");
    writeToFile(filename, eigenvalueresult);
  }

  {
    const std::string forceresult =
        writeTable<MPQCDataForceMap_t, MPQCDataForceVector_t>()(
            results.Result_Force_fused, results.getMaxLevel());
    LOG(0, "Force table is \n" << forceresult);
    std::string filename;
    filename += FRAGMENTPREFIX + std::string("_Forces.dat");
    writeToFile(filename, forceresult);
  }
  // we don't want to print grid to a table
  {
    // print times (without flops for now)
    typedef boost::mpl::remove<
        boost::mpl::remove<MPQCDataTimeVector_t, MPQCDataFused::times_total_flops>::type,
        MPQCDataFused::times_gather_flops>::type
        MPQCDataTimeVector_noflops_t;
    const std::string timesresult =
        writeTable<MPQCDataTimeMap_t, MPQCDataTimeVector_noflops_t >()(
            results.Result_Time_fused, results.getMaxLevel());
    LOG(0, "Times table is \n" << timesresult);
    std::string filename;
    filename += FRAGMENTPREFIX + std::string("_Times.dat");
    writeToFile(filename, timesresult);
  }
}

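/** Appends the homology graph of every fragment together with its fragment
 *  data and total energy to the given homology container file, creating an
 *  empty container first if the file does not exist yet.
 *
 * @param homology_file homology file to parse (if present) and append to
 * @param results summed up results container
 * @param KeySetFilename path to the key set file
 * @return true - everything ok, false - writing the file failed
 */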
bool appendToHomologyFile(
    const boost::filesystem::path &homology_file,
    const FragmentationResults &results,
    const std::string &KeySetFilename)
{
  // read homology container (if present)
  HomologyContainer homology_container;
  if (boost::filesystem::exists(homology_file)) {
    std::ifstream returnstream(homology_file.string().c_str());
    if (returnstream.good()) {
      boost::archive::text_iarchive ia(returnstream);
      ia >> homology_container;
    } else {
      ELOG(2, "Failed to parse from " << homology_file.string() << ".");
    }
    returnstream.close();
  } else {
    LOG(2, "Could not open " << homology_file.string()
        << ", creating empty container.");
  }

  // append all fragments to a HomologyContainer
  HomologyContainer::container_t values;
  const size_t FragmentCounter = results.Result_perIndexSet_Energy.size();

  // convert KeySetContainer to IndexSetContainer
  IndexSetContainer::ptr ForceContainer(new IndexSetContainer(results.getForceKeySet()));
  const IndexSetContainer::Container_t &Indices = results.getContainer();
  const IndexSetContainer::Container_t &ForceIndices = ForceContainer->getContainer();
  IndexSetContainer::Container_t::const_iterator iter = Indices.begin();
  IndexSetContainer::Container_t::const_iterator forceiter = ForceIndices.begin();
  for (;iter != Indices.end(); ++iter, ++forceiter) // go through each IndexSet
  {
    // create graph from the force indices (i.e. with hydrogens)
    HomologyGraph graph(**forceiter);
    // obtain information for value of HomologyContainer entry with normal indices
    const IndexSet::ptr &index = *iter;
    HomologyContainer::value_t value;
    std::map<IndexSet::ptr, std::pair< MPQCDataFragmentMap_t, MPQCDataFragmentMap_t> >::const_iterator fragmentiter
        = results.Result_perIndexSet_Fragment.find(index);
    ASSERT( fragmentiter != results.Result_perIndexSet_Fragment.end(),
        "appendToHomologyFile() - cannot find index "+toString(*index)
        +" in FragmentResults.");
    value.first = boost::fusion::at_key<MPQCDataFused::fragment>(fragmentiter->second.first);
    std::map<IndexSet::ptr, std::pair< MPQCDataEnergyMap_t, MPQCDataEnergyMap_t> >::const_iterator energyiter
        = results.Result_perIndexSet_Energy.find(index);
    ASSERT( energyiter != results.Result_perIndexSet_Energy.end(),
        "appendToHomologyFile() - cannot find index "+toString(*index)
        +" in FragmentResults.");
    value.second = boost::fusion::at_key<MPQCDataFused::energy_total>(energyiter->second.second);
    values.insert( std::make_pair( graph, value) );
  }
  homology_container.insert(values);

  // store homology container again
  std::ofstream outputstream(homology_file.string().c_str());
  if (outputstream.good()) { // check if opened
    boost::archive::text_oarchive oa(outputstream);
    oa << homology_container;
    if (outputstream.fail()) { // check if correctly written
      LOG(1, "Failed to write to file " << homology_file.string() << ".");
      return false;
    } else
      outputstream.close();
  } else {
    LOG(1, "Failed to open file " << homology_file.string()
        << " for writing.");
    return false;
  }
  return true;
}

Action::state_ptr FragmentationFragmentationAutomationAction::performCall() {
  boost::asio::io_service io_service;

  // TODO: Have io_service run in second thread and merge with current again eventually

  size_t Exitflag = 0;
  std::map<JobId_t, MPQCData> fragmentData;
  {
    MPQCFragmentController mpqccontroller(io_service);
    mpqccontroller.setHost(params.host.get());
    mpqccontroller.setPort(params.port.get());
    mpqccontroller.setLevel(params.level.get());
    // Phase One: obtain ids
    std::vector< boost::filesystem::path > jobfiles = params.jobfiles.get();
    mpqccontroller.requestIds(jobfiles.size());

    // Phase Two: create and add MPQCJobs
    if (!mpqccontroller.addJobsFromFiles(params.executable.get().string(), jobfiles))
      return Action::failure;

    // Phase Three: calculate result
    mpqccontroller.waitforResults(jobfiles.size());
    mpqccontroller.getResults(fragmentData);

    Exitflag += mpqccontroller.getExitflag();
  }

#ifdef HAVE_VMG
  if (params.DoLongrange.get()) {
  if ( World::getInstance().getAllAtoms().size() == 0) {
    ELOG(1, "Please load the full molecule into the world before starting this action.");
    return Action::failure;
  }

  // obtain combined charge density
  FragmentationChargeDensity summedChargeDensity(
      fragmentData,
      params.path.get());
  const std::vector<SamplingGrid> full_sample = summedChargeDensity.getFullSampledGrid();

  LOG(1, "INFO: There are " << fragmentData.size() << " short-range and "
      << full_sample.size() << " level-wise long-range jobs.");

  // Phase Four: obtain more ids
  std::map<JobId_t, VMGData> longrangeData;
  {
    VMGFragmentController vmgcontroller(io_service);
    vmgcontroller.setHost(params.host.get());
    vmgcontroller.setPort(params.port.get());
    const size_t NoJobs = fragmentData.size()+full_sample.size();
    vmgcontroller.requestIds(NoJobs);

    // Phase Five: create VMGJobs
    const size_t near_field_cells = params.near_field_cells.get();
    const size_t interpolation_degree = params.interpolation_degree.get();
    if (!vmgcontroller.createLongRangeJobs(
        fragmentData,
        full_sample,
        summedChargeDensity.getFragment(),
        near_field_cells,
        interpolation_degree))
      return Action::failure;

    // Phase Six: calculate result
    vmgcontroller.waitforResults(NoJobs);
    vmgcontroller.getResults(longrangeData);
    ASSERT( NoJobs == longrangeData.size(),
        "FragmentationFragmentationAutomationAction::performCall() - number of MPQCresults+"
        +toString(full_sample.size())+"="+toString(NoJobs)
        +" and VMGresults "+toString(longrangeData.size())+" don't match.");
    Exitflag += vmgcontroller.getExitflag();
  }

  // remove full solution corresponding to full_sample from map (must be highest ids), has to be treated extra
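  // (std::map iterates in ascending job id order, so stepping back
  //  full_sample.size() entries from end() selects exactly those
  //  highest-id jobs that hold the per-level full solutions.)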
  std::map<JobId_t, VMGData>::iterator iter = longrangeData.end();
  for (size_t i=0;i<full_sample.size();++i)
    --iter;
  std::map<JobId_t, VMGData>::iterator remove_iter = iter;
  std::vector<VMGData> fullsolutionData;
  for (; iter != longrangeData.end(); ++iter)
    fullsolutionData.push_back(iter->second);
  longrangeData.erase(remove_iter, longrangeData.end());

  // Final phase: sum up and print result
  FragmentationResults results(
      fragmentData,
      longrangeData,
      params.path.get());
  results(
      fragmentData,
      longrangeData,
      fullsolutionData,
      full_sample);
  {
    LOG(1, "INFO: Parsing fragment files from " << params.path.get() << ".");
    printReceivedFullResults(results);
  }

  // append all keysets to homology file
  if (Exitflag == 0) {
    const boost::filesystem::path &homology_file = params.homology_file.get();
    if (homology_file.string() != "") {
      LOG(1, "INFO: Appending HomologyGraphs to file " << homology_file.string() << ".");
      if (!appendToHomologyFile(homology_file, results, params.path.get()))
        Exitflag = 1;
    }
  }

  std::map<JobId_t, std::string> debugData;
  {
    if (!full_sample.empty()) {
      // create debug jobs for each level to print the summed-up potential to vtk files
      VMGDebugGridFragmentController debugcontroller(io_service);
      debugcontroller.setHost(params.host.get());
      debugcontroller.setPort(params.port.get());
      debugcontroller.requestIds(full_sample.size());
      if (!debugcontroller.createDebugJobs(full_sample))
        return Action::failure;
      debugcontroller.waitforResults(full_sample.size());
      debugcontroller.getResults(debugData);
      Exitflag += debugcontroller.getExitflag();
    }
  }
  }
#else
  // Final phase: print result
  {
    LOG(1, "INFO: Parsing fragment files from " << params.path.get() << ".");
    printReceivedMPQCResults(
        fragmentData,
        params.path.get(),
        getNoAtomsFromAdjacencyFile(params.path.get()));
  }
#endif

  return (Exitflag == 0) ? Action::success : Action::failure;
}

Action::state_ptr FragmentationFragmentationAutomationAction::performUndo(Action::state_ptr _state) {
  return Action::success;
}

Action::state_ptr FragmentationFragmentationAutomationAction::performRedo(Action::state_ptr _state){
  return Action::success;
}

bool FragmentationFragmentationAutomationAction::canUndo() {
  return false;
}

bool FragmentationFragmentationAutomationAction::shouldUndo() {
  return false;
}
/** =========== end of function ====================== */