source: src/comm/mpi/comm_info.cpp@ 1610dc

Last change on this file since 1610dc was 2112b1, checked in by Julian Iseringhausen <isering@…>, 14 years ago

Did some work on the MPI communication.

git-svn-id: https://svn.version.fz-juelich.de/scafacos/trunk@1245 5161e1c8-67bf-11de-9fd5-51895aff932f

  • Property mode set to 100644
File size: 4.5 KB
Line 
1/**
2 * @file comm_info.cpp
3 * @author Julian Iseringhausen <isering@ins.uni-bonn.de>
4 * @date Mon Nov 21 13:27:22 2011
5 *
6 * @brief Stores some MPI-relevant information.
7 *
8 */
9
#ifdef HAVE_CONFIG_H
#include <config.h>
#endif

#ifdef HAVE_MPI

#include <mpi.h>

#include <cassert>
#include <set>
#include <vector>

#include "base/index.hpp"
#include "comm/mpi/comm_info.hpp"
#include "grid/grid.hpp"
#include "grid/multigrid.hpp"
25
26using namespace VMG;
27
/// Default constructor. The communicator and datatype caches start out
/// empty and are filled lazily by GetCommunicator() / GetUnionCommunicator()
/// and GetDatatypeSubarray().
VMG::MPI::CommInfo::CommInfo()
{
}
31
32VMG::MPI::CommInfo::~CommInfo()
33{
34 std::map<Key, MPI_Comm>::iterator c_iter;
35 for (c_iter=communicators.begin(); c_iter!=communicators.end(); ++c_iter)
36 if (c_iter->second != MPI_COMM_NULL)
37 MPI_Comm_free(&c_iter->second);
38
39 std::map<Key, MPI_Datatype>::iterator d_iter;
40 for (d_iter=datatypes.begin(); d_iter!=datatypes.end(); ++d_iter)
41 MPI_Type_free(&d_iter->second);
42}
43
44MPI_Comm VMG::MPI::CommInfo::GetCommunicator(const Grid& grid)
45{
46 std::map<Key, MPI_Comm>::iterator iter = communicators.find(Key(grid));
47
48 if (iter != communicators.end())
49 return iter->second;
50
51 int rank;
52 MPI_Comm comm;
53
54 MPI_Comm_rank(comm_global, &rank);
55
56 if (grid.Global().SizeLocal().Product() == 0) {
57
58 MPI_Comm_split(comm_global, MPI_UNDEFINED, rank, &comm);
59
60 communicators.insert(std::make_pair(Key(grid), comm));
61
62 }else {
63
64 Index dims, periods, coords;
65 MPI_Comm comm_temp;
66
67 MPI_Cart_get(comm_global, 3, dims.vec(), periods.vec(), coords.vec());
68
69 MPI_Comm_split(comm_global, 1, rank, &comm_temp);
70
71 dims = GetGlobalDims(comm_temp, coords);
72
73 MPI_Cart_create(comm_temp, 3, dims.vec(), periods.vec(), false, &comm);
74
75 MPI_Comm_free(&comm_temp);
76
77 communicators.insert(std::make_pair(Key(grid), comm));
78
79 }
80
81 return comm;
82}
83
84MPI_Comm VMG::MPI::CommInfo::GetUnionCommunicator(const Grid& grid_1, const Grid& grid_2)
85{
86 std::map<Key, MPI_Comm>::iterator iter = communicators.find(Key(grid_1, grid_2));
87
88 if (iter != communicators.end())
89 return iter->second;
90
91 int rank;
92 MPI_Comm comm;
93
94 MPI_Comm_rank(comm_global, &rank);
95
96 if (grid_1.Global().SizeLocal().Product() == 0 && grid_2.Global().SizeLocal().Product() == 0) {
97
98 MPI_Comm_split(comm_global, MPI_UNDEFINED, rank, &comm);
99
100 communicators.insert(std::make_pair(Key(grid_1, grid_2), comm));
101
102 }else {
103
104 Index dims, periods, coords;
105 MPI_Comm comm_temp;
106
107 MPI_Cart_get(comm_global, 3, dims.vec(), periods.vec(), coords.vec());
108
109 MPI_Comm_split(comm_global, 1, rank, &comm_temp);
110
111 dims = GetGlobalDims(comm_temp, coords);
112
113 MPI_Cart_create(comm_temp, 3, dims.vec(), periods.vec(), false, &comm);
114
115 MPI_Comm_free(&comm_temp);
116
117 communicators.insert(std::make_pair(Key(grid_1, grid_2), comm));
118
119 }
120
121 return comm;
122}
123MPI_Datatype VMG::MPI::CommInfo::GetDatatypeSubarray(const Grid& grid, const GridIteratorSet& bounds)
124{
125 return GetDatatypeSubarray(bounds.Begin().GetBegin(), bounds.Begin().GetEnd(), grid.Local().SizeTotal());
126}
127
128
129MPI_Datatype VMG::MPI::CommInfo::GetDatatypeSubarray(const Index& begin, const Index& end, const Index& size_total)
130{
131 std::map<Key, MPI_Datatype>::iterator iter = datatypes.find(Key(begin, end, size_total));
132
133 if (iter != datatypes.end())
134 return iter->second;
135
136 MPI_Datatype dt;
137 Index sizes = size_total;
138 Index subsizes = end - begin;
139 Index starts = begin;
140
141 MPI_Type_create_subarray(3, sizes.vec(), subsizes.vec(), starts.vec(), MPI_ORDER_C, MPI_DOUBLE, &dt);
142
143 MPI_Type_commit(&dt);
144
145 return dt;
146}
147
148Index VMG::MPI::CommInfo::Pos(const Grid& grid)
149{
150 Index dims, periods, coords;
151 int status;
152
153 MPI_Comm comm = GetCommunicator(grid);
154
155 MPI_Topo_test(comm, &status);
156
157 assert(status == MPI_CART);
158
159 MPI_Cart_get(comm, 3, dims.vec(), periods.vec(), coords.vec());
160
161 return coords;
162}
163
164Index VMG::MPI::CommInfo::Procs(const Grid& grid)
165{
166 Index dims, periods, coords;
167 int status;
168
169 MPI_Comm comm = GetCommunicator(grid);
170
171 MPI_Topo_test(comm, &status);
172
173 assert(status == MPI_CART);
174
175 MPI_Cart_get(comm, 3, dims.vec(), periods.vec(), coords.vec());
176
177 return dims;
178}
179
180Index VMG::MPI::CommInfo::GetGlobalDims(MPI_Comm comm, Index pos)
181{
182 std::set<int> unique_set[3];
183 Index dims;
184
185 int size;
186 MPI_Comm_size(comm, &size);
187
188 int* coordinates = new int[3*size];
189
190 MPI_Allgather(pos.vec(), 3, MPI_INT, coordinates, 3, MPI_INT, comm);
191
192 for (int i=0; i<size; ++i)
193 for (int j=0; j<3; ++j)
194 unique_set[j].insert(coordinates[3*i+j]);
195
196 for (int j=0; j<3; ++j)
197 dims[j] = static_cast<int>(unique_set[j].size());
198
199 delete [] coordinates;
200
201 return dims;
202}
203
204#endif /* HAVE_MPI */
Note: See TracBrowser for help on using the repository browser.