source: src/comm/mpi/comm_info.cpp@76e019

Last change on this file since 76e019 was 97c25dd, checked in by Julian Iseringhausen <isering@…>, 14 years ago

Work on a red-black communication routine to halve the communication amount.

git-svn-id: https://svn.version.fz-juelich.de/scafacos/trunk@1247 5161e1c8-67bf-11de-9fd5-51895aff932f

  • Property mode set to 100644
File size: 4.6 KB
/**
 * @file comm_info.cpp
 * @author Julian Iseringhausen <isering@ins.uni-bonn.de>
 * @date Mon Nov 21 13:27:22 2011
 *
 * @brief Stores some MPI-relevant information.
 *
 */

#ifdef HAVE_CONFIG_H
#include <config.h>
#endif

#ifdef HAVE_MPI

#include <mpi.h>

#include <cassert>
#include <set>

#include "base/index.hpp"
#include "base/timer.hpp"
#include "comm/mpi/comm_info.hpp"
#include "grid/grid.hpp"
#include "grid/multigrid.hpp"

using namespace VMG;

VMG::MPI::CommInfo::CommInfo()
{
}

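// Free all cached communicators (skipping MPI_COMM_NULL entries, which
// MPI_Comm_free must not be called on) and all committed datatypes.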
VMG::MPI::CommInfo::~CommInfo()
{
  std::map<Key, MPI_Comm>::iterator c_iter;
  for (c_iter=communicators.begin(); c_iter!=communicators.end(); ++c_iter)
    if (c_iter->second != MPI_COMM_NULL)
      MPI_Comm_free(&c_iter->second);

  std::map<Key, MPI_Datatype>::iterator d_iter;
  for (d_iter=datatypes.begin(); d_iter!=datatypes.end(); ++d_iter)
    MPI_Type_free(&d_iter->second);
}

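// Return a Cartesian communicator spanning exactly those processes that hold
// a non-empty portion of the given grid. Communicators are created on first
// request and cached; processes without local grid points get MPI_COMM_NULL.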
MPI_Comm VMG::MPI::CommInfo::GetCommunicator(const Grid& grid)
{
  std::map<Key, MPI_Comm>::iterator iter = communicators.find(Key(grid));

  if (iter != communicators.end())
    return iter->second;

  int rank;
  MPI_Comm comm;

  MPI_Comm_rank(comm_global, &rank);

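  // Processes that hold no part of this grid opt out of the split with
  // color MPI_UNDEFINED and therefore receive MPI_COMM_NULL.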
  if (grid.Global().SizeLocal().Product() == 0) {

    MPI_Comm_split(comm_global, MPI_UNDEFINED, rank, &comm);

    communicators.insert(std::make_pair(Key(grid), comm));

  } else {

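    // The remaining processes first form a plain communicator, then rebuild
    // a Cartesian topology whose extents are the number of distinct process
    // coordinates per direction (see GetGlobalDims).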
    Index dims, periods, coords;
    MPI_Comm comm_temp;

    MPI_Cart_get(comm_global, 3, dims.vec(), periods.vec(), coords.vec());

    MPI_Comm_split(comm_global, 1, rank, &comm_temp);

    dims = GetGlobalDims(comm_temp, coords);

    MPI_Cart_create(comm_temp, 3, dims.vec(), periods.vec(), false, &comm);

    MPI_Comm_free(&comm_temp);

    communicators.insert(std::make_pair(Key(grid), comm));

  }

  return comm;
}

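// Same caching scheme as GetCommunicator, but for the union of all processes
// that hold a part of at least one of the two grids, for operations that
// couple two grids.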
MPI_Comm VMG::MPI::CommInfo::GetUnionCommunicator(const Grid& grid_1, const Grid& grid_2)
{
  std::map<Key, MPI_Comm>::iterator iter = communicators.find(Key(grid_1, grid_2));

  if (iter != communicators.end())
    return iter->second;

  int rank;
  MPI_Comm comm;

  MPI_Comm_rank(comm_global, &rank);

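  // A process drops out only if it owns no part of either grid.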
  if (grid_1.Global().SizeLocal().Product() == 0 && grid_2.Global().SizeLocal().Product() == 0) {

    MPI_Comm_split(comm_global, MPI_UNDEFINED, rank, &comm);

    communicators.insert(std::make_pair(Key(grid_1, grid_2), comm));

  } else {

    Index dims, periods, coords;
    MPI_Comm comm_temp;

    MPI_Cart_get(comm_global, 3, dims.vec(), periods.vec(), coords.vec());

    MPI_Comm_split(comm_global, 1, rank, &comm_temp);

    dims = GetGlobalDims(comm_temp, coords);

    MPI_Cart_create(comm_temp, 3, dims.vec(), periods.vec(), false, &comm);

    MPI_Comm_free(&comm_temp);

    communicators.insert(std::make_pair(Key(grid_1, grid_2), comm));

  }

  return comm;
}
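
// Convenience overload: derive the subarray bounds from a GridIteratorSet.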
MPI_Datatype VMG::MPI::CommInfo::GetDatatypeSubarray(const Grid& grid, const GridIteratorSet& bounds)
{
  return GetDatatypeSubarray(bounds.Begin().GetBegin(), bounds.Begin().GetEnd(), grid.Local().SizeTotal());
}

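// Return a committed MPI subarray datatype describing the box [begin, end)
// inside a local array of extent size_total. Datatypes are cached; the
// lower_bound/key_comp pair tests for an existing equivalent key before
// creating a new type.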
MPI_Datatype VMG::MPI::CommInfo::GetDatatypeSubarray(const Index& begin, const Index& end, const Index& size_total)
{
  const Key k = Key(begin, end, size_total);

  std::map<Key, MPI_Datatype>::const_iterator iter = datatypes.lower_bound(k);

  if (iter != datatypes.end() && !(datatypes.key_comp()(k, iter->first)))
    return iter->second;

  MPI_Datatype dt;
  Index sizes = size_total;
  Index subsizes = end - begin;
  Index starts = begin;

  MPI_Type_create_subarray(3, sizes.vec(), subsizes.vec(), starts.vec(), MPI_ORDER_C, MPI_DOUBLE, &dt);

  MPI_Type_commit(&dt);

  datatypes.insert(std::make_pair(k, dt));

  return dt;
}

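// Coordinates of this process in the Cartesian communicator of the grid.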
Index VMG::MPI::CommInfo::Pos(const Grid& grid)
{
  Index dims, periods, coords;
  int status;

  MPI_Comm comm = GetCommunicator(grid);

  MPI_Topo_test(comm, &status);

  assert(status == MPI_CART);

  MPI_Cart_get(comm, 3, dims.vec(), periods.vec(), coords.vec());

  return coords;
}

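// Extents of the process grid in the Cartesian communicator of the grid.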
Index VMG::MPI::CommInfo::Procs(const Grid& grid)
{
  Index dims, periods, coords;
  int status;

  MPI_Comm comm = GetCommunicator(grid);

  MPI_Topo_test(comm, &status);

  assert(status == MPI_CART);

  MPI_Cart_get(comm, 3, dims.vec(), periods.vec(), coords.vec());

  return dims;
}

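// Determine the process-grid extents by gathering the coordinates of all
// ranks in comm and counting the distinct values per dimension. This yields
// correct extents provided the participating ranks form a full Cartesian box.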
Index VMG::MPI::CommInfo::GetGlobalDims(MPI_Comm comm, Index pos)
{
  std::set<int> unique_set[3];
  Index dims;

  int size;
  MPI_Comm_size(comm, &size);

  int* coordinates = new int[3*size];

  MPI_Allgather(pos.vec(), 3, MPI_INT, coordinates, 3, MPI_INT, comm);

  for (int i=0; i<size; ++i)
    for (int j=0; j<3; ++j)
      unique_set[j].insert(coordinates[3*i+j]);

  for (int j=0; j<3; ++j)
    dims[j] = static_cast<int>(unique_set[j].size());

  delete [] coordinates;

  return dims;
}

#endif /* HAVE_MPI */