/*
 * Project: MoleCuilder
 * Description: creates and alters molecular systems
 * Copyright (C) 2010-2012 University of Bonn. All rights reserved.
 *
 *
 *   This file is part of MoleCuilder.
 *
 *    MoleCuilder is free software: you can redistribute it and/or modify
 *    it under the terms of the GNU General Public License as published by
 *    the Free Software Foundation, either version 2 of the License, or
 *    (at your option) any later version.
 *
 *    MoleCuilder is distributed in the hope that it will be useful,
 *    but WITHOUT ANY WARRANTY; without even the implied warranty of
 *    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 *    GNU General Public License for more details.
 *
 *    You should have received a copy of the GNU General Public License
 *    along with MoleCuilder.  If not, see <http://www.gnu.org/licenses/>.
 */

/*
 * FragmentationAutomationAction.cpp
 *
 *  Created on: May 18, 2012
 *      Author: heber
 */

// include config.h
#ifdef HAVE_CONFIG_H
#include <config.h>
#endif

// boost asio needs specific operator new
#include <boost/asio.hpp>

#include "CodePatterns/MemDebug.hpp"

#include <fstream>
#include <iostream>
#include <iterator>
#include <map>
#include <sstream>
#include <string>
#include <vector>

#include <boost/archive/text_iarchive.hpp>
#include <boost/assign.hpp>
#include <boost/filesystem.hpp>
#include <boost/fusion/include/at_key.hpp>

#include "CodePatterns/Assert.hpp"
#include "CodePatterns/Info.hpp"
#include "CodePatterns/Log.hpp"

#include "JobMarket/Controller/FragmentController.hpp"
#include "JobMarket/Jobs/FragmentJob.hpp"

#include "Atom/atom.hpp"
#include "Fragmentation/EnergyMatrix.hpp"
#include "Fragmentation/ForceMatrix.hpp"
#include "Fragmentation/Fragmentation.hpp"
#include "Fragmentation/Histogram/Histogram.hpp"
#include "Fragmentation/HydrogenSaturation_enum.hpp"
#include "Fragmentation/KeySet.hpp"
#include "Fragmentation/KeySetsContainer.hpp"
#include "Fragmentation/Summation/Summator.hpp"
#include "Graph/DepthFirstSearchAnalysis.hpp"
#include "Jobs/MPQCJob.hpp"
#include "Jobs/MPQCData.hpp"
#include "molecule.hpp"
#include "World.hpp"

#include "Actions/FragmentationAction/FragmentationAutomationAction.hpp"

using namespace MoleCuilder;
using namespace boost::assign;

// and construct the stuff
#include "FragmentationAutomationAction.def"
#include "Action_impl_pre.hpp"

/** =========== define the function ====================== */

class controller_AddOn;

// needs to be defined for using the FragmentController
controller_AddOn *getAddOn()
{
  return NULL;
}

/** Creates a MPQCCommandJob with argument \a filename.
 *
 * @param jobs created job is added to this vector
 * @param command mpqc command to execute
 * @param filename filename being argument to job
 * @param nextid id for this job
 */
void parsejob(
    std::vector<FragmentJob::ptr> &jobs,
    const std::string &command,
    const std::string &filename,
    const JobId_t nextid)
{
  std::ifstream file;
  file.open(filename.c_str());
  ASSERT( file.good(),
      "parsejob() - file "+filename+" does not exist.");
  std::string output((std::istreambuf_iterator<char>(file)),
      std::istreambuf_iterator<char>());
  FragmentJob::ptr testJob( new MPQCJob(nextid, output) );
  jobs.push_back(testJob);
  file.close();
  LOG(1, "INFO: Added MPQCCommandJob from file "+filename+".");
}
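// Example usage (cf. createJobsFromFiles() below): the controller hands out a
// fresh job id per input file, parsejob() wraps the file's content into an
// MPQCJob, and the collected jobs are then handed to the controller in one go.
// The command "mpqc" and the file name "fragment.in" are illustrative
// placeholders only; in the Action they come from params.executable and
// params.jobfiles.
//
//   std::vector<FragmentJob::ptr> jobs;
//   const JobId_t next_id = controller.getAvailableId();
//   parsejob(jobs, "mpqc", "fragment.in", next_id);
//   controller.addJobs(jobs);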
/** Helper function to get number of atoms somehow.
 *
 * Here, we just count the lines in the adjacency file, as this should
 * correspond to the number of atoms. The exception are atoms without any
 * bonds, but then fragmentation makes no sense anyway.
 *
 * @param path path to the adjacency file
 */
size_t getNoAtomsFromAdjacencyFile(const std::string &path)
{
  size_t NoAtoms = 0;

  // parse in special file to get atom count (from line count)
  std::string filename(path);
  filename += FRAGMENTPREFIX;
  filename += ADJACENCYFILE;
  std::ifstream adjacency(filename.c_str());
  if (adjacency.fail()) {
    LOG(0, endl << "getNoAtomsFromAdjacencyFile() - Unable to open " << filename << ", is the directory correct?");
    return 0;
  }
  std::string buffer;
  while (getline(adjacency, buffer))
    NoAtoms++;
  LOG(1, "INFO: There are " << NoAtoms << " atoms.");

  return NoAtoms;
}
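// Note on the recombination performed below: the per-fragment results are
// summed up in the spirit of a many-body expansion, i.e. every keyset
// contributes once and contributions of shared subsets are subtracted again so
// that nothing is double-counted. As a minimal, purely illustrative sketch for
// two fragments {1,2} and {2,3} sharing atom {2}:
//
//   E_total ~ E({1,2}) + E({2,3}) - E({2})
//
// SubsetMap, OrthogonalSummation and Summator generalise this
// inclusion-exclusion over all keysets parsed from the keyset file.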
/** Print MPQCData from received results.
 *
 * @param results received results to extract MPQCData from
 * @param KeySetFilename filename with keysets to associate forces correctly
 * @param NoAtoms total number of atoms
 */
bool printReceivedMPQCResults(
    const std::vector<FragmentResult::ptr> &results,
    const std::string &KeySetFilename,
    size_t NoAtoms)
{
  EnergyMatrix Energy;
  EnergyMatrix EnergyFragments;
  ForceMatrix Force;
  ForceMatrix ForceFragments;

  // align fragments
  std::map< JobId_t, size_t > MatrixNrLookup;
  size_t FragmentCounter = 0;
  { // bring ids in order ...
    typedef std::map< JobId_t, FragmentResult::ptr > IdResultMap_t;
    IdResultMap_t IdResultMap;
    for (std::vector<FragmentResult::ptr>::const_iterator iter = results.begin();
        iter != results.end(); ++iter) {
#ifndef NDEBUG
      std::pair< IdResultMap_t::iterator, bool> inserter =
#endif
      IdResultMap.insert( make_pair((*iter)->getId(), *iter) );
      ASSERT( inserter.second,
          "printReceivedMPQCResults() - two results have same id "
          +toString((*iter)->getId())+".");
    }
    // ... and fill lookup
    for (IdResultMap_t::const_iterator iter = IdResultMap.begin();
        iter != IdResultMap.end(); ++iter)
      MatrixNrLookup.insert( make_pair(iter->first, FragmentCounter++) );
  }
  LOG(1, "INFO: There are " << FragmentCounter << " fragments.");

  // extract results
  std::vector<MPQCData> fragmentData(results.size());
  MPQCData combinedData;

  LOG(2, "DEBUG: Parsing now through " << results.size() << " results.");
  for (std::vector<FragmentResult::ptr>::const_iterator iter = results.begin();
      iter != results.end(); ++iter) {
    LOG(1, "RESULT: job #"+toString((*iter)->getId())+": "+toString((*iter)->result));
    MPQCData extractedData;
    std::stringstream inputstream((*iter)->result);
    LOG(2, "DEBUG: First 50 characters of the FragmentResult's string: "+(*iter)->result.substr(0, 50));
    boost::archive::text_iarchive ia(inputstream);
    ia >> extractedData;
    LOG(1, "INFO: extracted data is " << extractedData << ".");
    // store the extracted data in the same order as the received results
    fragmentData[std::distance(results.begin(), iter)] = extractedData;

    // place results into EnergyMatrix ...
    {
      MatrixContainer::MatrixArray matrix;
      matrix.resize(1);
      matrix[0].resize(1, extractedData.energies.total);
      if (!Energy.AddMatrix(
          std::string("MPQCJob ")+toString((*iter)->getId()),
          matrix,
          MatrixNrLookup[(*iter)->getId()])) {
        ELOG(1, "Adding energy matrix failed.");
        return false;
      }
    }
    // ... and ForceMatrix (with two empty columns in front)
    {
      MatrixContainer::MatrixArray matrix;
      const size_t rows = extractedData.forces.size();
      matrix.resize(rows);
      for (size_t i=0;i<rows;++i) {
        matrix[i].resize(2+NDIM, 0.);
        for (size_t j=0;j<NDIM;++j)
          matrix[i][2+j] = extractedData.forces[i][j];
      }
      if (!Force.AddMatrix(
          std::string("MPQCJob ")+toString((*iter)->getId()),
          matrix,
          MatrixNrLookup[(*iter)->getId()])) {
        ELOG(1, "Adding force matrix failed.");
        return false;
      }
    }
  }
  // add one more matrix for the total values: a single zero entry suffices for the energy ...
  MatrixContainer::MatrixArray matrix;
  matrix.resize(1);
  matrix[0].resize(1, 0.);
  if (!Energy.AddMatrix(std::string("MPQCJob total"), matrix, FragmentCounter)) return false;
  // ... but the force matrix needs the total number of atoms as rows
  matrix.resize(NoAtoms);
  for (size_t i = 0; i < NoAtoms; ++i)
    matrix[i].resize(2+NDIM, 0.);
  if (!Force.AddMatrix(std::string("MPQCJob total"), matrix, FragmentCounter)) return false;

  // initialise indices
  KeySetsContainer KeySet;
  if (!Energy.InitialiseIndices()) return false;
  if (!Force.ParseIndices(KeySetFilename.c_str())) return false;
  if (!KeySet.ParseKeySets(KeySetFilename.c_str(), Force.RowCounter, Force.MatrixCounter)) return false;

  /// prepare for OrthogonalSummation

  // gather all present indices in AllIndices
  IndexSet::ptr AllIndices(new IndexSet);
  for (KeySetsContainer::ArrayOfIntVectors::const_iterator iter = KeySet.KeySets.begin();
      iter != KeySet.KeySets.end(); ++iter)
    for (KeySetsContainer::IntVector::const_iterator keyiter = (*iter).begin();
        keyiter != (*iter).end(); ++keyiter) {
      if (*keyiter != -1)
        (*AllIndices) += *keyiter;
    }
  LOG(1, "INFO: AllIndices is " << AllIndices << ".");
  // create container with all keysets
  IndexSetContainer::ptr container(new IndexSetContainer(AllIndices));
  for (KeySetsContainer::ArrayOfIntVectors::const_iterator iter = KeySet.KeySets.begin();
      iter != KeySet.KeySets.end(); ++iter) {
    IndexSet tempset;
    for (KeySetsContainer::IntVector::const_iterator keyiter = (*iter).begin();
        keyiter != (*iter).end(); ++keyiter)
      if (*keyiter != -1)
        tempset += *keyiter;
    container->insert(tempset);
  }
  // create the map of all keysets
  SubsetMap::ptr subsetmap(new SubsetMap(*container));

  // convert all MPQCData to MPQCDataMap_t
  std::vector<MPQCDataMap_t> MPQCData_fused;
  MPQCData_fused.reserve(fragmentData.size());
  for (std::vector<MPQCData>::const_iterator dataiter = fragmentData.begin();
      dataiter != fragmentData.end(); ++dataiter) {
    MPQCDataMap_t instance;
    boost::fusion::at_key<MPQCDataFused::energy_total>(instance) = dataiter->energies.total;
//    boost::fusion::at_key<MPQCDataFused::energy_eigenvalues>(instance) = dataiter->energies.eigenvalues;
//    boost::fusion::at_key<MPQCDataFused::forces>(instance) = dataiter->forces;
//    boost::fusion::at_key<MPQCDataFused::times_walltime>(instance) = dataiter->times.walltime;
    MPQCData_fused.push_back(instance);
  }

  // get a vector of all job ids
  std::vector<JobId_t> jobids(results.size(), JobId::IllegalJob);
//  std::transform(results.begin(), results.end(), jobids.begin(),
//      boost::bind(&FragmentResult::getId, _1));
  std::vector<JobId_t>::iterator iditer = jobids.begin();
  for (std::vector<FragmentResult::ptr>::const_iterator resultiter = results.begin();
      resultiter != results.end(); ++resultiter) {
    *iditer = (*resultiter)->getId();
    ++iditer;
  }

  // associate each index set with its value (do not use the full set, that is also contained!)
  const IndexSetContainer::Container_t &_container = container->getContainer();
  Summator<MPQCDataFused::energy_total> sum_energy_total(
      subsetmap,
      MPQCData_fused,
      jobids,
      _container,
      MatrixNrLookup
      );
  const double energy_total = sum_energy_total();
  LOG(0, "STATUS: Resulting total energy is " << energy_total << ".");
//  Summator<MPQCDataFused::energy_eigenvalues> sum_energy_eigenvalues(
//      subsetmap,
//      MPQCData_fused,
//      jobids,
//      _container,
//      MatrixNrLookup
//      );

  // associate each index set with its value (do not use the full set, that is also contained!)
//  {
//    OrthogonalSummation<double>::InputSets_t indices(_container.begin(), _container.end()-1);
//    OrthogonalSummation<double>::InputValues_t values(_container.size()-1, 0.);
//    std::vector<MPQCData>::const_iterator dataiter = fragmentData.begin();
//    std::vector<FragmentResult::ptr>::const_iterator resultiter = results.begin();
//    for (; dataiter != fragmentData.end(); ++dataiter, ++resultiter) {
//      const MPQCData &extractedData = *dataiter;
//      values[ MatrixNrLookup[(*resultiter)->getId()] ] = extractedData.energies.total;
//    }
//
//    // create the summation functor and evaluate
//    OrthogonalSummation<double> OS(indices, values, subsetmap);
//    const double energyresult = OS();
//    LOG(0, "STATUS: Resulting energy is " << energyresult << ".");
//  }
//  {
//    OrthogonalSummation<Histogram>::InputSets_t indices(_container.begin(), _container.end()-1);
//    OrthogonalSummation<Histogram>::InputValues_t values(_container.size()-1);
//    std::vector<MPQCData>::const_iterator dataiter = fragmentData.begin();
//    std::vector<FragmentResult::ptr>::const_iterator resultiter = results.begin();
//    for (; dataiter != fragmentData.end(); ++dataiter, ++resultiter) {
//      const MPQCData &extractedData = *dataiter;
//      values[ MatrixNrLookup[(*resultiter)->getId()] ] = Histogram(extractedData.energies.eigenvalues, 0., 0.1);
//    }
//
//    // create the summation functor and evaluate
//    OrthogonalSummation<Histogram> OS(indices, values, subsetmap);
//    const Histogram eigenvaluegram = OS();
//    LOG(0, "STATUS: Resulting histogram is " << eigenvaluegram << ".");
//  }

  // combine all found data
  if (!KeySet.ParseManyBodyTerms()) return false;

  if (!EnergyFragments.AllocateMatrix(Energy.Header, Energy.MatrixCounter, Energy.RowCounter, Energy.ColumnCounter)) return false;
  if (!ForceFragments.AllocateMatrix(Force.Header, Force.MatrixCounter, Force.RowCounter, Force.ColumnCounter)) return false;

  if (!Energy.SetLastMatrix(0., 0)) return false;
  if (!Force.SetLastMatrix(0., 2)) return false;

  // sum up the many-body contributions order by order
  for (int BondOrder=0;BondOrder<KeySet.Order;BondOrder++) {
    // --- energy ---
    if (!EnergyFragments.SumSubManyBodyTerms(Energy, KeySet, BondOrder)) return false;
    if (!Energy.SumSubEnergy(EnergyFragments, NULL, KeySet, BondOrder, 1.)) return false;
    // --- forces ---
    if (!ForceFragments.SumSubManyBodyTerms(Force, KeySet, BondOrder)) return false;
    if (!Force.SumSubForces(ForceFragments, KeySet, BondOrder, 1.)) return false;
  }

  return true;
}
/** Creates MPQCCommandJobs from given \a jobfiles and hands them to the controller.
 *
 * @param controller controller to add and send jobs with
 * @param params parameters of this Action (executable, host, port)
 * @param jobfiles paths to the fragment job input files
 * @return true - all jobs created and sent, false - at least one file is missing
 */
bool createJobsFromFiles(
    FragmentController &controller,
    const FragmentationFragmentationAutomationAction::FragmentationFragmentationAutomationParameters &params,
    const std::vector< boost::filesystem::path > &jobfiles)
{
  std::vector<FragmentJob::ptr> jobs;
  for (std::vector< boost::filesystem::path >::const_iterator iter = jobfiles.begin();
      iter != jobfiles.end(); ++iter) {
    const std::string &filename = (*iter).string();
    if (boost::filesystem::exists(filename)) {
      const JobId_t next_id = controller.getAvailableId();
      LOG(1, "INFO: Creating MPQCCommandJob with filename '"
          +filename+"', and id "+toString(next_id)+".");
      parsejob(jobs, params.executable.get().string(), filename, next_id);
    } else {
      ELOG(1, "Fragment job "+filename+" does not exist.");
      return false;
    }
  }
  controller.addJobs(jobs);
  controller.sendJobs(params.host.get(), params.port.get());

  return true;
}

/** Polls the server until all expected results have been calculated.
 *
 * @param io_service io_service to run for each status request
 * @param controller controller to query for the job status
 * @param params parameters of this Action (host and port of the server)
 * @param NoExpectedResults number of results to wait for
 */
void WaitforResults(
    boost::asio::io_service &io_service,
    FragmentController &controller,
    const FragmentationFragmentationAutomationAction::FragmentationFragmentationAutomationParameters &params,
    const size_t NoExpectedResults)
{
  size_t NoCalculatedResults = 0;
  while (NoCalculatedResults != NoExpectedResults) {
    // wait a bit
    boost::asio::deadline_timer timer(io_service);
    timer.expires_from_now(boost::posix_time::milliseconds(500));
    timer.wait();
    // then request status
    controller.checkResults(params.host.get(), params.port.get());
    RunService(io_service, "Checking on results");

    const std::pair<size_t, size_t> JobStatus = controller.getJobStatus();
    LOG(1, "INFO: #" << JobStatus.first << " jobs are waiting in the queue and #"
        << JobStatus.second << " jobs are calculated so far.");
    NoCalculatedResults = JobStatus.second;
  }
}
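// performCall() below talks to the JobMarket server in distinct phases, each
// driven through one RunService() cycle of the io_service: request fresh job
// ids, create and send the jobs, poll until all results are calculated, fetch
// the results, and finally recombine and print them via
// printReceivedMPQCResults().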
Action::state_ptr FragmentationFragmentationAutomationAction::performCall() {
  boost::asio::io_service io_service;
  FragmentController controller(io_service);

  // TODO: Have io_service run in second thread and merge with current again eventually

  // Phase One: obtain ids
  std::vector< boost::filesystem::path > jobfiles = params.jobfiles.get();
  requestIds(controller, params, jobfiles.size());
  RunService(io_service, "Requesting ids");

  // Phase Two: create and add jobs
  if (!createJobsFromFiles(controller, params, jobfiles))
    return Action::failure;
  RunService(io_service, "Adding jobs");

  // Phase Three: calculate result
  WaitforResults(io_service, controller, params, jobfiles.size());

  // Phase Four: get result
  controller.receiveResults(params.host.get(), params.port.get());
  RunService(io_service, "Phase Four");

  // Final phase: print result
  {
    LOG(1, "INFO: Parsing fragment files from " << params.path.get() << ".");
    std::vector<FragmentResult::ptr> results = controller.getReceivedResults();
    printReceivedMPQCResults(
        results,
        params.path.get(),
        getNoAtomsFromAdjacencyFile(params.path.get()));
  }
  size_t Exitflag = controller.getExitflag();

  return (Exitflag == 0) ? Action::success : Action::failure;
}

Action::state_ptr FragmentationFragmentationAutomationAction::performUndo(Action::state_ptr _state) {
  return Action::success;
}

Action::state_ptr FragmentationFragmentationAutomationAction::performRedo(Action::state_ptr _state){
  return Action::success;
}

bool FragmentationFragmentationAutomationAction::canUndo() {
  return false;
}

bool FragmentationFragmentationAutomationAction::shouldUndo() {
  return false;
}
/** =========== end of function ====================== */