Changeset 2112b1
- Timestamp: Dec 8, 2011, 12:53:53 PM (14 years ago)
- Children: 1610dc
- Parents: 138f86
- Location: src
- Files:
  - 2 added
  - 4 deleted
  - 5 edited
  - Makefile.am (modified) (1 diff)
  - comm/comm_mpi.cpp (modified) (6 diffs)
  - comm/comm_mpi.hpp (modified) (4 diffs)
  - comm/mpi/aux.hpp (deleted)
  - comm/mpi/comm_info.cpp (modified) (8 diffs)
  - comm/mpi/comm_info.hpp (modified) (4 diffs)
  - comm/mpi/comm_key.cpp (deleted)
  - comm/mpi/comm_key.hpp (deleted)
  - comm/mpi/datatype.hpp (deleted)
  - comm/mpi/key.cpp (added)
  - comm/mpi/key.hpp (added)
src/Makefile.am
--- src/Makefile.am (r138f86)
+++ src/Makefile.am (r2112b1)
@@ -46,10 +46,9 @@
 	comm/mpi/comm_info.cpp \
 	comm/mpi/comm_info.hpp \
-	comm/mpi/comm_key.cpp \
-	comm/mpi/comm_key.hpp \
-	comm/mpi/datatype.hpp \
 	comm/mpi/error_handler.cpp \
 	comm/mpi/error_handler.hpp \
 	comm/mpi/has_request_vec.hpp \
+	comm/mpi/key.cpp \
+	comm/mpi/key.hpp \
 	commands/com_check_consistency.cpp \
 	commands/com_check_iteration_counter.cpp \
src/comm/comm_mpi.cpp
--- src/comm/comm_mpi.cpp (r138f86)
+++ src/comm/comm_mpi.cpp (r2112b1)
@@ -28,5 +28,4 @@
 #include "base/timer.hpp"
 #include "comm/comm_mpi.hpp"
-#include "comm/mpi/datatype.hpp"
 #include "grid/grid.hpp"
 #include "grid/multigrid.hpp"

@@ -282,27 +281,21 @@
 
     if (has_halo_1[i]) {
-
-      MPI_Datatype dts1 = GetDatatype(grid, grid.Iterators().Halo1()[i]);
+      MPI_Datatype dts1 = comm_info.GetDatatypeSubarray(grid, grid.Iterators().Halo1()[i]);
       MPI_Isend(&grid(0), 1, dts1, neighbors.Left(), 1, comm, &Request());
-      MPI_Type_free(&dts1);
     }
 
     if (has_halo_2[i]) {
 
-      MPI_Datatype dts2 = GetDatatype(grid, grid.Iterators().Halo2()[i]);
+      MPI_Datatype dts2 = comm_info.GetDatatypeSubarray(grid, grid.Iterators().Halo2()[i]);
       MPI_Isend(&grid(0), 1, dts2, neighbors.Right(), 2, comm, &Request());
-      MPI_Type_free(&dts2);
 
-      MPI_Datatype dtr1 = GetDatatype(grid, grid.Iterators().NearBoundary2()[i]);
+      MPI_Datatype dtr1 = comm_info.GetDatatypeSubarray(grid, grid.Iterators().NearBoundary2()[i]);
       ReceiveAndAddSubgrid(grid, comm, dtr1, neighbors.Right(), 1);
-      MPI_Type_free(&dtr1);
+
     }
 
     if (has_halo_1[i]) {
-
-      MPI_Datatype dtr2 = GetDatatype(grid, grid.Iterators().NearBoundary1()[i]);
+      MPI_Datatype dtr2 = comm_info.GetDatatypeSubarray(grid, grid.Iterators().NearBoundary1()[i]);
       ReceiveAndAddSubgrid(grid, comm, dtr2, neighbors.Left(), 2);
-      MPI_Type_free(&dtr2);
-
     }
 

@@ -337,6 +330,6 @@
 
     if (has_halo_1[i]) {
-      dts_left = GetDatatype(grid, grid.Iterators().NearBoundary1()[i]);
-      dtr_left = GetDatatype(grid, grid.Iterators().Halo1()[i]);
+      dts_left = comm_info.GetDatatypeSubarray(grid, grid.Iterators().NearBoundary1()[i]);
+      dtr_left = comm_info.GetDatatypeSubarray(grid, grid.Iterators().Halo1()[i]);
       num_left = 1;
     }else {

@@ -347,6 +340,6 @@
 
     if (has_halo_2[i]) {
-      dts_right = GetDatatype(grid, grid.Iterators().NearBoundary2()[i]);
-      dtr_right = GetDatatype(grid, grid.Iterators().Halo2()[i]);
+      dts_right = comm_info.GetDatatypeSubarray(grid, grid.Iterators().NearBoundary2()[i]);
+      dtr_right = comm_info.GetDatatypeSubarray(grid, grid.Iterators().Halo2()[i]);
       num_right = 1;
     }else {

@@ -364,14 +357,4 @@
                  comm, MPI_STATUS_IGNORE);
     Timer::Stop("MPI_Sendrecv");
-
-    if (has_halo_1[i]) {
-      MPI_Type_free(&dts_left);
-      MPI_Type_free(&dtr_left);
-    }
-
-    if (has_halo_2[i]) {
-      MPI_Type_free(&dts_right);
-      MPI_Type_free(&dtr_right);
-    }
 
   }

@@ -1116,23 +1099,4 @@
 }
 
-MPI_Datatype CommMPI::GetDatatype(const Index& start, const Index& end, const Index& grid_size)
-{
-  MPI_Datatype type;
-  Index sizes = grid_size;
-  Index subsizes = end - start;
-  Index starts = start;
-
-  MPI_Type_create_subarray(3, sizes.vec(), subsizes.vec(), starts.vec(), MPI_ORDER_C, MPI_DOUBLE, &type);
-
-  MPI_Type_commit(&type);
-
-  return type;
-}
-
-MPI_Datatype CommMPI::GetDatatype(const Grid& grid, const GridIteratorSet& bounds)
-{
-  return GetDatatype(bounds.Begin().GetBegin(), bounds.Begin().GetEnd(), grid.Local().SizeTotal());
-}
-
 void CommMPI::CreateOutputFiles(const Grid& grid, const std::stringstream& serial_data, const char* information,
                                 const Index& begin_global, const Index& end_global,
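For readers unfamiliar with MPI derived datatypes: the types returned by comm_info.GetDatatypeSubarray above describe a rectangular slab of the local grid, so a single MPI_Isend with count 1 transfers a whole halo plane without any manual packing. The stand-alone sketch below (not VMG code; the grid extents and the send-to-self are made up for illustration) shows the same MPI_Type_create_subarray / MPI_Type_commit / MPI_Isend sequence that the changed code relies on.

// Minimal sketch: build a committed subarray datatype for one yz-plane of a
// 3D grid stored in C order and send it. In the changeset this create/commit
// step moves behind CommInfo::GetDatatypeSubarray() so the committed type can
// be reused instead of being created and freed on every halo exchange.
#include <mpi.h>
#include <vector>

int main(int argc, char** argv)
{
  MPI_Init(&argc, &argv);

  const int nx = 8, ny = 8, nz = 8;             // local grid including halo (illustrative)
  std::vector<double> grid(nx * ny * nz, 1.0);

  int sizes[3]    = {nx, ny, nz};               // full local array
  int subsizes[3] = {1, ny, nz};                // one yz-plane, e.g. a halo slab
  int starts[3]   = {0, 0, 0};                  // leftmost plane

  MPI_Datatype slab;
  MPI_Type_create_subarray(3, sizes, subsizes, starts,
                           MPI_ORDER_C, MPI_DOUBLE, &slab);
  MPI_Type_commit(&slab);

  // Send the slab to ourselves once; a halo exchange would target a neighbor rank.
  int rank;
  MPI_Comm_rank(MPI_COMM_WORLD, &rank);
  MPI_Request req;
  MPI_Isend(grid.data(), 1, slab, rank, 0, MPI_COMM_WORLD, &req);

  std::vector<double> recv(ny * nz);
  MPI_Recv(recv.data(), ny * nz, MPI_DOUBLE, rank, 0, MPI_COMM_WORLD, MPI_STATUS_IGNORE);
  MPI_Wait(&req, MPI_STATUS_IGNORE);

  MPI_Type_free(&slab);
  MPI_Finalize();
  return 0;
}

Because the committed type is now obtained from CommInfo rather than built in place, the per-call MPI_Type_free calls disappear from the exchange loops above, which appears to be the motivation for this part of the changeset.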
src/comm/comm_mpi.hpp
--- src/comm/comm_mpi.hpp (r138f86)
+++ src/comm/comm_mpi.hpp (r2112b1)
@@ -26,5 +26,4 @@
 #include "comm/comm.hpp"
 #include "comm/mpi/comm_info.hpp"
-#include "comm/mpi/datatype.hpp"
 #include "comm/mpi/has_request_vec.hpp"
 

@@ -32,5 +31,4 @@
 {
 
-class DatatypeInfo;
 class DomainDecomposition;
 class GridIteratorSet;

@@ -122,6 +120,4 @@
   VMG::MPI::CommInfo comm_info;
 
-  std::vector<MPI_Request> request_vec;
-
   MPI_Comm comm_global;
   bool win_created;

@@ -132,7 +128,4 @@
   void ReceiveAndAddSubgrid(Grid& grid, const MPI_Comm& comm, MPI_Datatype& type,
                             const int& rank, const int& tag);
-
-  MPI_Datatype GetDatatype(const Index& start, const Index& end, const Index& grid_size);
-  MPI_Datatype GetDatatype(const Grid& grid, const GridIteratorSet& bounds);
 };
 
src/comm/mpi/comm_info.cpp
--- src/comm/mpi/comm_info.cpp (r138f86)
+++ src/comm/mpi/comm_info.cpp (r2112b1)
@@ -21,5 +21,4 @@
 #include "base/index.hpp"
 #include "comm/mpi/comm_info.hpp"
-#include "comm/mpi/datatype.hpp"
 #include "grid/grid.hpp"
 #include "grid/multigrid.hpp"

@@ -33,19 +32,17 @@
 VMG::MPI::CommInfo::~CommInfo()
 {
-  std::map<CommKey, MPI_Comm>::iterator iter;
-
-  for (iter=communicators.begin(); iter!=communicators.end(); ++iter)
-    if (iter->second != MPI_COMM_NULL)
-      MPI_Comm_free(&(iter->second));
+  std::map<Key, MPI_Comm>::iterator c_iter;
+  for (c_iter=communicators.begin(); c_iter!=communicators.end(); ++c_iter)
+    if (c_iter->second != MPI_COMM_NULL)
+      MPI_Comm_free(&c_iter->second);
+
+  std::map<Key, MPI_Datatype>::iterator d_iter;
+  for (d_iter=datatypes.begin(); d_iter!=datatypes.end(); ++d_iter)
+    MPI_Type_free(&d_iter->second);
 }
 
 MPI_Comm VMG::MPI::CommInfo::GetCommunicator(const Grid& grid)
 {
-  return GetCommunicator(grid.Global(), reinterpret_cast<const void*>(&grid));
-}
-
-MPI_Comm VMG::MPI::CommInfo::GetCommunicator(const GlobalIndices& global, const void* addr)
-{
-  std::map<CommKey, MPI_Comm>::iterator iter = communicators.find(CommKey(global, addr));
+  std::map<Key, MPI_Comm>::iterator iter = communicators.find(Key(grid));
 
   if (iter != communicators.end())

@@ -57,9 +54,9 @@
   MPI_Comm_rank(comm_global, &rank);
 
-  if (global.SizeLocal().Product() == 0) {
+  if (grid.Global().SizeLocal().Product() == 0) {
 
     MPI_Comm_split(comm_global, MPI_UNDEFINED, rank, &comm);
 
-    communicators.insert(std::make_pair(CommKey(global, addr), comm));
+    communicators.insert(std::make_pair(Key(grid), comm));
 
   }else {

@@ -78,5 +75,5 @@
     MPI_Comm_free(&comm_temp);
 
-    communicators.insert(std::make_pair(CommKey(global, addr), comm));
+    communicators.insert(std::make_pair(Key(grid), comm));
 
   }

@@ -87,5 +84,5 @@
 MPI_Comm VMG::MPI::CommInfo::GetUnionCommunicator(const Grid& grid_1, const Grid& grid_2)
 {
-  std::map<CommKey, MPI_Comm>::iterator iter = communicators.find(CommKey(grid_1, grid_2));
+  std::map<Key, MPI_Comm>::iterator iter = communicators.find(Key(grid_1, grid_2));
 
   if (iter != communicators.end())

@@ -101,5 +98,5 @@
     MPI_Comm_split(comm_global, MPI_UNDEFINED, rank, &comm);
 
-    communicators.insert(std::make_pair(CommKey(grid_1, grid_2), comm));
+    communicators.insert(std::make_pair(Key(grid_1, grid_2), comm));
 
   }else {

@@ -118,5 +115,5 @@
     MPI_Comm_free(&comm_temp);
 
-    communicators.insert(std::make_pair(CommKey(grid_1, grid_2), comm));
+    communicators.insert(std::make_pair(Key(grid_1, grid_2), comm));
 
   }

@@ -124,58 +121,27 @@
   return comm;
 }
 
-VMG::MPI::Datatypes& VMG::MPI::CommInfo::GetDatatypes(const Grid& grid)
-{
-  std::map<const Grid*, Datatypes>::iterator iter = datatypes.find(&grid);
-
-  if (iter == datatypes.end()) {
-
-    iter = datatypes.insert(std::make_pair(&grid, Datatypes())).first;
-
-    int rank, size;
-    Index sizes, subsizes, starts;
-
-    MPI_Comm comm = GetCommunicator(grid.Level());
-
-    if (comm != MPI_COMM_NULL) {
-
-      MPI_Comm_rank(comm, &rank);
-      MPI_Comm_size(comm, &size);
-
-      int* buffer = new int[6*size];
-
-      for (int i=0; i<3; ++i) {
-
-        buffer[6*rank + i  ] = grid.Global().BeginLocal()[i];
-        buffer[6*rank + i+3] = grid.Global().SizeLocal()[i];
-
-      }
-
-      MPI_Allgather(MPI_IN_PLACE, 6, MPI_INT, buffer, 6, MPI_INT, comm);
-
-      sizes = grid.Global().SizeGlobal();
-
-      for (int i=0; i<size; ++i) {
-
-        for (int j=0; j<3; ++j) {
-
-          starts[j] = buffer[6*i + j];
-          subsizes[j] = buffer[6*i + j+3];
-
-        }
-
-        Datatype dt(starts, starts+subsizes, sizes, comm);
-
-        iter->second.push_back(dt);
-
-      }
-
-      delete [] buffer;
-
-    }
-
-  }
-
-  return iter->second;
+MPI_Datatype VMG::MPI::CommInfo::GetDatatypeSubarray(const Grid& grid, const GridIteratorSet& bounds)
+{
+  return GetDatatypeSubarray(bounds.Begin().GetBegin(), bounds.Begin().GetEnd(), grid.Local().SizeTotal());
+}
+
+
+MPI_Datatype VMG::MPI::CommInfo::GetDatatypeSubarray(const Index& begin, const Index& end, const Index& size_total)
+{
+  std::map<Key, MPI_Datatype>::iterator iter = datatypes.find(Key(begin, end, size_total));
+
+  if (iter != datatypes.end())
+    return iter->second;
+
+  MPI_Datatype dt;
+  Index sizes = size_total;
+  Index subsizes = end - begin;
+  Index starts = begin;
+
+  MPI_Type_create_subarray(3, sizes.vec(), subsizes.vec(), starts.vec(), MPI_ORDER_C, MPI_DOUBLE, &dt);
+
+  MPI_Type_commit(&dt);
+
+  return dt;
 }
 
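The new GetDatatypeSubarray looks the subarray type up in a Key-indexed map, creates and commits it on a miss, and the destructor frees whatever ended up in the map. A condensed, self-contained sketch of that caching pattern follows; the class name, the std::array-based key, and the explicit map insertion are illustrative assumptions, not the actual VMG::MPI::Key or CommInfo code.

// Simplified sketch of a subarray-datatype cache (illustrative, not VMG code):
// one committed type per unique (begin, end, total size) triple, all freed
// together in the destructor instead of per call.
#include <mpi.h>
#include <array>
#include <map>

class DatatypeCache
{
public:
  // begin/end/size_total are 3D index triples; MPI_ORDER_C and MPI_DOUBLE
  // match a C-ordered grid of doubles.
  MPI_Datatype GetDatatypeSubarray(const std::array<int,3>& begin,
                                   const std::array<int,3>& end,
                                   const std::array<int,3>& size_total)
  {
    const Key key{begin, end, size_total};
    std::map<Key, MPI_Datatype>::iterator iter = datatypes.find(key);
    if (iter != datatypes.end())
      return iter->second;                      // reuse the committed type

    int sizes[3], subsizes[3], starts[3];
    for (int i = 0; i < 3; ++i) {
      sizes[i]    = size_total[i];
      subsizes[i] = end[i] - begin[i];
      starts[i]   = begin[i];
    }

    MPI_Datatype dt;
    MPI_Type_create_subarray(3, sizes, subsizes, starts, MPI_ORDER_C, MPI_DOUBLE, &dt);
    MPI_Type_commit(&dt);
    datatypes.insert(std::make_pair(key, dt));  // cache for later calls
    return dt;
  }

  ~DatatypeCache()
  {
    // Free every cached datatype exactly once, as CommInfo::~CommInfo() now does.
    for (std::map<Key, MPI_Datatype>::iterator it = datatypes.begin(); it != datatypes.end(); ++it)
      MPI_Type_free(&it->second);
  }

private:
  typedef std::array<std::array<int,3>, 3> Key;  // lexicographic operator< comes for free
  std::map<Key, MPI_Datatype> datatypes;
};

Committed datatypes must be freed while MPI is still initialized, so a cache like this has to be destroyed before MPI_Finalize; freeing everything in one place replaces the MPI_Type_free calls that the changeset removes from the halo-exchange loops.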
src/comm/mpi/comm_info.hpp
--- src/comm/mpi/comm_info.hpp (r138f86)
+++ src/comm/mpi/comm_info.hpp (r2112b1)
@@ -20,6 +20,5 @@
 
 #include "base/tuple.hpp"
-#include "comm/mpi/comm_key.hpp"
-#include "comm/mpi/datatype.hpp"
+#include "comm/mpi/key.hpp"
 
 namespace VMG

@@ -28,5 +27,5 @@
 class Index;
 class Grid;
-class Multigrid;
+class GridIteratorSet;
 
 namespace MPI

@@ -40,7 +39,8 @@
 
   MPI_Comm GetCommunicator(const Grid& grid);
-  MPI_Comm GetCommunicator(const GlobalIndices& global, const void* addr);
   MPI_Comm GetUnionCommunicator(const Grid& grid_1, const Grid& grid_2);
-  Datatypes& GetDatatypes(const Grid& grid);
+
+  MPI_Datatype GetDatatypeSubarray(const Grid& grid, const GridIteratorSet& bounds);
+  MPI_Datatype GetDatatypeSubarray(const Index& begin, const Index& end, const Index& size_total);
 
   Index Pos(const Grid& grid);

@@ -53,6 +53,6 @@
 
  private:
-  std::map<const Grid*, VMG::MPI::Datatypes> datatypes;
-  std::map<CommKey, MPI_Comm> communicators;
+  std::map<Key, MPI_Comm> communicators;
+  std::map<Key, MPI_Datatype> datatypes;
 
   MPI_Comm comm_global;