/**
 * @file   comm_info.cpp
 * @author Julian Iseringhausen
 * @date   Mon Nov 21 13:27:22 2011
 *
 * @brief  Stores some MPI-relevant information.
 *
 */

#ifdef HAVE_CONFIG_H
#include <config.h>
#endif

#ifdef HAVE_MPI

#include <mpi.h>

#include <cassert>
#include <set>

#include "base/index.hpp"
#include "base/timer.hpp"
#include "comm/mpi/comm_info.hpp"
#include "grid/grid.hpp"
#include "grid/multigrid.hpp"

using namespace VMG;

VMG::MPI::CommInfo::CommInfo()
{
}

VMG::MPI::CommInfo::~CommInfo()
{
  // Release all cached communicators and datatypes.
  std::map<Key, MPI_Comm>::iterator c_iter;
  for (c_iter=communicators.begin(); c_iter!=communicators.end(); ++c_iter)
    if (c_iter->second != MPI_COMM_NULL)
      MPI_Comm_free(&c_iter->second);

  std::map<Key, MPI_Datatype>::iterator d_iter;
  for (d_iter=datatypes.begin(); d_iter!=datatypes.end(); ++d_iter)
    MPI_Type_free(&d_iter->second);
}

MPI_Comm VMG::MPI::CommInfo::GetCommunicator(const Grid& grid)
{
  // Return the cached communicator for this grid if it exists.
  std::map<Key, MPI_Comm>::iterator iter = communicators.find(Key(grid));
  if (iter != communicators.end())
    return iter->second;

  int rank;
  MPI_Comm comm;

  MPI_Comm_rank(comm_global, &rank);

  if (grid.Global().SizeLocal().Product() == 0) {

    // This process holds no part of the grid and does not join the new communicator.
    MPI_Comm_split(comm_global, MPI_UNDEFINED, rank, &comm);
    communicators.insert(std::make_pair(Key(grid), comm));

  } else {

    Index dims, periods, coords;
    MPI_Comm comm_temp;

    MPI_Cart_get(comm_global, 3, dims.vec(), periods.vec(), coords.vec());
    MPI_Comm_split(comm_global, 1, rank, &comm_temp);

    dims = GetGlobalDims(comm_temp, coords);

    MPI_Cart_create(comm_temp, 3, dims.vec(), periods.vec(), false, &comm);
    MPI_Comm_free(&comm_temp);

    communicators.insert(std::make_pair(Key(grid), comm));

  }

  return comm;
}

MPI_Comm VMG::MPI::CommInfo::GetUnionCommunicator(const Grid& grid_1, const Grid& grid_2)
{
  // Return the cached communicator for this pair of grids if it exists.
  std::map<Key, MPI_Comm>::iterator iter = communicators.find(Key(grid_1, grid_2));
  if (iter != communicators.end())
    return iter->second;

  int rank;
  MPI_Comm comm;

  MPI_Comm_rank(comm_global, &rank);

  if (grid_1.Global().SizeLocal().Product() == 0 &&
      grid_2.Global().SizeLocal().Product() == 0) {

    // This process holds no part of either grid and does not join the new communicator.
    MPI_Comm_split(comm_global, MPI_UNDEFINED, rank, &comm);
    communicators.insert(std::make_pair(Key(grid_1, grid_2), comm));

  } else {

    Index dims, periods, coords;
    MPI_Comm comm_temp;

    MPI_Cart_get(comm_global, 3, dims.vec(), periods.vec(), coords.vec());
    MPI_Comm_split(comm_global, 1, rank, &comm_temp);

    dims = GetGlobalDims(comm_temp, coords);

    MPI_Cart_create(comm_temp, 3, dims.vec(), periods.vec(), false, &comm);
    MPI_Comm_free(&comm_temp);

    communicators.insert(std::make_pair(Key(grid_1, grid_2), comm));

  }

  return comm;
}

MPI_Datatype VMG::MPI::CommInfo::GetDatatypeSubarray(const Grid& grid, const GridIteratorSet& bounds)
{
  return GetDatatypeSubarray(bounds.Begin().GetBegin(), bounds.Begin().GetEnd(),
                             grid.Local().SizeTotal());
}

MPI_Datatype VMG::MPI::CommInfo::GetDatatypeSubarray(const Index& begin, const Index& end,
                                                     const Index& size_total)
{
  const Key k = Key(begin, end, size_total);

  // Check whether a matching datatype has already been committed.
  std::map<Key, MPI_Datatype>::const_iterator iter = datatypes.lower_bound(k);
  if (iter != datatypes.end() && !(datatypes.key_comp()(k, iter->first)))
    return iter->second;

  MPI_Datatype dt;
  Index sizes = size_total;
  Index subsizes = end - begin;
  Index starts = begin;

  MPI_Type_create_subarray(3, sizes.vec(), subsizes.vec(), starts.vec(),
                           MPI_ORDER_C, MPI_DOUBLE, &dt);
  MPI_Type_commit(&dt);

  datatypes.insert(std::make_pair(k, dt));

  return dt;
}

Index VMG::MPI::CommInfo::Pos(const Grid& grid)
{
  Index dims, periods, coords;
  int status;

  MPI_Comm comm = GetCommunicator(grid);

  MPI_Topo_test(comm, &status);
  assert(status == MPI_CART);

  MPI_Cart_get(comm, 3, dims.vec(), periods.vec(), coords.vec());

  return coords;
}

Index VMG::MPI::CommInfo::Procs(const Grid& grid)
{
  Index dims, periods, coords;
  int status;

  MPI_Comm comm = GetCommunicator(grid);

  MPI_Topo_test(comm, &status);
  assert(status == MPI_CART);

  MPI_Cart_get(comm, 3, dims.vec(), periods.vec(), coords.vec());

  return dims;
}

Index VMG::MPI::CommInfo::GetGlobalDims(MPI_Comm comm, Index pos)
{
  std::set<int> unique_set[3];
  Index dims;
  int size;

  MPI_Comm_size(comm, &size);

  int* coordinates = new int[3*size];

  // Gather the Cartesian coordinates of all processes in this communicator.
  MPI_Allgather(pos.vec(), 3, MPI_INT, coordinates, 3, MPI_INT, comm);

  // The number of processes along each dimension equals the number of
  // distinct coordinate values occurring in that dimension.
  for (int i=0; i<size; ++i)
    for (int j=0; j<3; ++j)
      unique_set[j].insert(coordinates[3*i+j]);

  for (int j=0; j<3; ++j)
    dims[j] = static_cast<int>(unique_set[j].size());

  delete [] coordinates;

  return dims;
}

#endif /* HAVE_MPI */
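
/*
 * Usage sketch (illustrative only, not part of the original source): a caller
 * performing a halo exchange could combine GetCommunicator() and
 * GetDatatypeSubarray() roughly as follows. The names `comm_info`, `grid`,
 * `bounds_send`, `bounds_recv`, `grid_data` and `neighbor_rank` are
 * hypothetical placeholders for the caller's own objects.
 *
 *   MPI_Comm comm = comm_info.GetCommunicator(grid);
 *   MPI_Datatype dt_send = comm_info.GetDatatypeSubarray(grid, bounds_send);
 *   MPI_Datatype dt_recv = comm_info.GetDatatypeSubarray(grid, bounds_recv);
 *
 *   MPI_Sendrecv(grid_data, 1, dt_send, neighbor_rank, 0,
 *                grid_data, 1, dt_recv, neighbor_rank, 0,
 *                comm, MPI_STATUS_IGNORE);
 *
 * Communicators and subarray datatypes are cached by CommInfo and released in
 * ~CommInfo(), so callers are not expected to free them themselves.
 */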