/*
 * vmg - a versatile multigrid solver
 * Copyright (C) 2012 Institute for Numerical Simulation, University of Bonn
 *
 * vmg is free software: you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation, either version 3 of the License, or
 * (at your option) any later version.
 *
 * vmg is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program. If not, see <http://www.gnu.org/licenses/>.
 */

/**
 * @file   comm_mpi.cpp
 * @author Julian Iseringhausen <isering@ins.uni-bonn.de>
 * @date   Wed Jun 16 13:21:06 2010
 *
 * @brief  Class for MPI-based communication.
 *
 */

#ifdef HAVE_CONFIG_H
#include <config.h>
#endif

#ifdef HAVE_MPI

#include <mpi.h>
#ifdef HAVE_MARMOT
#include <enhancempicalls.h>
#include <sourceinfompicalls.h>
#endif

#ifdef HAVE_BOOST_FILESYSTEM
#include <boost/filesystem.hpp>
namespace fs = boost::filesystem;
#endif

#ifdef HAVE_VTK
#include <vtkAbstractArray.h>
#include <vtkImageData.h>
#include <vtkPointData.h>
#include <vtkSmartPointer.h>
#include <vtkXMLImageDataWriter.h>
#endif

#include <cassert>
#include <cstdarg>
#include <cstdio>
#include <cstring>
#include <ctime>
#include <iomanip>
#include <sstream>

#include "base/helper.hpp"
#include "base/tuple.hpp"
#include "comm/comm_mpi.hpp"
#include "comm/mpi/datatypes_local.hpp"
#include "grid/grid.hpp"
#include "grid/multigrid.hpp"
#include "grid/tempgrid.hpp"
#include "mg.hpp"
#include "base/timer.hpp"

static char print_buffer[512];

using namespace VMG;

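/*
 * Nonblocking send/receive helpers. Each VMG::MPI::Datatype describes one
 * region (face, edge or corner) to exchange. The Isend/Irecv calls register
 * their pending operation via Request(), which presumably hands out a slot
 * in the communicator's request list that the later WaitAll()/TestAll()
 * calls complete. The *Buffered variants pack into / unpack from internal
 * buffers instead of applying derived datatypes to the grid memory directly.
 */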
void CommMPI::IsendAll(Grid& grid, std::vector<VMG::MPI::Datatype>& types, const MPI_Comm& comm, const int& tag_start)
{
  for (std::vector<VMG::MPI::Datatype>::const_iterator iter=types.begin(); iter!=types.end(); ++iter)
    iter->Isend(grid, tag_start, comm, Request());
}

void CommMPI::IrecvAll(Grid& grid, std::vector<VMG::MPI::Datatype>& types, const MPI_Comm& comm, const int& tag_start)
{
  for (std::vector<VMG::MPI::Datatype>::const_iterator iter=types.begin(); iter!=types.end(); ++iter)
    iter->Irecv(grid, tag_start, comm, Request());
}

void CommMPI::IsendAllBuffered(const Grid& grid, std::vector<VMG::MPI::Datatype>& types, const MPI_Comm& comm, const int& tag_start)
{
  for (std::vector<VMG::MPI::Datatype>::iterator iter=types.begin(); iter!=types.end(); ++iter)
    iter->IsendBuffered(grid, tag_start, comm, Request());
}

void CommMPI::IrecvAllBuffered(std::vector<VMG::MPI::Datatype>& types, const MPI_Comm& comm, const int& tag_start)
{
  for (std::vector<VMG::MPI::Datatype>::iterator iter=types.begin(); iter!=types.end(); ++iter)
    iter->IrecvBuffered(tag_start, comm, Request());
}

void CommMPI::ReplaceBufferAll(Grid& grid, const std::vector<VMG::MPI::Datatype>& types)
{
  for (std::vector<VMG::MPI::Datatype>::const_iterator iter=types.begin(); iter!=types.end(); ++iter)
    iter->GridReplace(grid);
}

void CommMPI::AddBufferAll(Grid& grid, const std::vector<VMG::MPI::Datatype>& types)
{
  for (std::vector<VMG::MPI::Datatype>::const_iterator iter=types.begin(); iter!=types.end(); ++iter)
    iter->GridSum(grid);
}

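/*
 * Data transfer between two grids of the hierarchy, e.g. when moving values
 * between a level and its differently-distributed counterpart. CommSubgrid
 * overwrites the target points, CommAddSubgrid accumulates into them. The
 * message tags (0411, 1806, ...) appear to serve only to keep different
 * communication phases apart; note that 0411 is an octal literal (decimal
 * 265), which is harmless here since the value itself carries no meaning.
 */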
void CommMPI::CommSubgrid(Grid& grid_old, Grid& grid_new, const int& direction)
{
  MPI_Comm comm = settings.CommunicatorGlobal(grid_old);
  if (comm != MPI_COMM_NULL) {
    VMG::MPI::DatatypesGlobal& datatypes = settings.DatatypesGlobal(grid_old, grid_new, direction);
    IrecvAllBuffered(datatypes.Receive(), comm, 0411);
    IsendAllBuffered(grid_old, datatypes.Send(), comm, 0411);
    WaitAll();
    ReplaceBufferAll(grid_new, datatypes.Receive());
  }
}

void CommMPI::CommAddSubgrid(Grid& grid_old, Grid& grid_new, const int& direction)
{
  MPI_Comm comm = settings.CommunicatorGlobal(grid_old);
  if (comm != MPI_COMM_NULL) {
    VMG::MPI::DatatypesGlobal& datatypes = settings.DatatypesGlobal(grid_old, grid_new, direction);
    IrecvAllBuffered(datatypes.Receive(), comm, 1806);
    IsendAllBuffered(grid_old, datatypes.Send(), comm, 1806);
    WaitAll();
    AddBufferAll(grid_new, datatypes.Receive());
  }
}

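/*
 * Halo (ghost cell) exchange within one level: every process sends its
 * near-boundary (NB) points to the neighboring processes, which receive
 * them into their halo regions and overwrite the ghost values there.
 */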
void CommMPI::CommToGhosts(Grid& grid)
{
  MPI_Comm comm = settings.CommunicatorLocal(grid);
  if (comm != MPI_COMM_NULL) {
    VMG::MPI::DatatypesLocal& types = settings.DatatypesLocal(grid);
    IrecvAllBuffered(types.Halo(), comm, 2310);
    IsendAllBuffered(grid, types.NB(), comm, 2310);
    WaitAll();
    ReplaceBufferAll(grid, types.Halo());
  }
}

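/*
 * Split-phase variant of the halo exchange, intended to overlap the
 * communication with interior computation. A minimal usage sketch (the
 * caller code below is hypothetical, not part of this file):
 *
 *   comm.CommToGhostsAsyncStart(grid);  // post Irecv/Isend
 *   smooth_interior(grid);              // work that needs no ghost values
 *   comm.CommToGhostsAsyncFinish(grid); // wait, then unpack into the halo
 *
 * TestAll() is called right after posting to give the MPI library a chance
 * to make asynchronous progress.
 */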
void CommMPI::CommToGhostsAsyncStart(Grid& grid)
{
  MPI_Comm comm = settings.CommunicatorLocal(grid);
  if (comm != MPI_COMM_NULL) {
    VMG::MPI::DatatypesLocal& types = settings.DatatypesLocal(grid);
    IrecvAllBuffered(types.Halo(), comm, 2412);
    IsendAllBuffered(grid, types.NB(), comm, 2412);
    TestAll();
  }
}

void CommMPI::CommToGhostsAsyncFinish(Grid& grid)
{
  WaitAll();
  ReplaceBufferAll(grid, settings.DatatypesLocal(grid).Halo());
}

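/*
 * Reverse halo exchange: ghost values travel back to their owning process
 * and are *added* to the near-boundary points. This is the transpose of
 * CommToGhosts and is needed whenever an operator has scattered partial
 * contributions into the ghost layer. (Note: the 0xc0ffee tag used below
 * exceeds the minimum tag upper bound of 32767 that the MPI standard
 * guarantees via MPI_TAG_UB, so it relies on the implementation providing
 * a larger tag space.)
 */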
void CommMPI::CommFromGhosts(Grid& grid)
{
  MPI_Comm comm = settings.CommunicatorLocal(grid);
  if (comm != MPI_COMM_NULL) {
    VMG::MPI::DatatypesLocal& types = settings.DatatypesLocal(grid);
    IrecvAllBuffered(types.NB(), comm, 1337);
    IsendAllBuffered(grid, types.Halo(), comm, 1337);
    WaitAll();
    AddBufferAll(grid, types.NB());
  }
}

void CommMPI::CommFromGhostsAsyncStart(Grid& grid)
{
  MPI_Comm comm = settings.CommunicatorLocal(grid);
  if (comm != MPI_COMM_NULL) {
    VMG::MPI::DatatypesLocal& types = settings.DatatypesLocal(grid);
    IrecvAllBuffered(types.NB(), comm, 0xc0ffee);
    IsendAllBuffered(grid, types.Halo(), comm, 0xc0ffee);
    TestAll();
  }
}

void CommMPI::CommFromGhostsAsyncFinish(Grid& grid)
{
  WaitAll();
  AddBufferAll(grid, settings.DatatypesLocal(grid).NB());
}

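/*
 * Global reductions over all processes. The MPI datatype arguments assume
 * vmg_float == double and vmg_int == int, matching the typedefs used
 * throughout vmg. The *Root variants reduce onto rank 0 only; other ranks
 * simply get their unmodified input back. Caution: GlobalBroadcast(char*)
 * derives the message length from strlen on every rank, so receiving
 * buffers must already hold a string at least as long as the broadcast one.
 */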
void CommMPI::Barrier()
{
  MPI_Barrier(comm_global);
}

vmg_float CommMPI::GlobalSum(const vmg_float& value)
{
  vmg_float result = value;
  MPI_Allreduce(MPI_IN_PLACE, &result, 1, MPI_DOUBLE, MPI_SUM, comm_global);
  return result;
}

vmg_float CommMPI::GlobalSumRoot(const vmg_float& value)
{
  vmg_float recv_buffer = value;
  vmg_float send_buffer = value;
  MPI_Reduce(&send_buffer, &recv_buffer, 1, MPI_DOUBLE, MPI_SUM, 0, comm_global);
  return recv_buffer;
}

void CommMPI::GlobalSumArray(vmg_float* array, const vmg_int& size)
{
  MPI_Allreduce(MPI_IN_PLACE, array, size, MPI_DOUBLE, MPI_SUM, comm_global);
}

void CommMPI::GlobalBroadcast(vmg_float& value)
{
  MPI_Bcast(&value, 1, MPI_DOUBLE, 0, comm_global);
}

void CommMPI::GlobalGather(vmg_float& value, vmg_float* array)
{
  MPI_Gather(&value, 1, MPI_DOUBLE, array, 1, MPI_DOUBLE, 0, comm_global);
}

vmg_int CommMPI::GlobalSum(const vmg_int& value)
{
  vmg_int result = value;
  MPI_Allreduce(MPI_IN_PLACE, &result, 1, MPI_INT, MPI_SUM, comm_global);
  return result;
}

vmg_int CommMPI::GlobalSumRoot(const vmg_int& value)
{
  vmg_int recv_buffer = value;
  vmg_int send_buffer = value;
  MPI_Reduce(&send_buffer, &recv_buffer, 1, MPI_INT, MPI_SUM, 0, comm_global);
  return recv_buffer;
}

void CommMPI::GlobalSumArray(vmg_int* array, const vmg_int& size)
{
  MPI_Allreduce(MPI_IN_PLACE, array, size, MPI_INT, MPI_SUM, comm_global);
}

vmg_float CommMPI::GlobalMax(const vmg_float& value)
{
  vmg_float result = value;
  MPI_Allreduce(MPI_IN_PLACE, &result, 1, MPI_DOUBLE, MPI_MAX, comm_global);
  return result;
}

vmg_float CommMPI::GlobalMaxRoot(const vmg_float& value)
{
  vmg_float recv_buffer = value;
  vmg_float send_buffer = value;
  MPI_Reduce(&send_buffer, &recv_buffer, 1, MPI_DOUBLE, MPI_MAX, 0, comm_global);
  return recv_buffer;
}

void CommMPI::GlobalMaxArray(vmg_float* array, const vmg_int& size)
{
  MPI_Allreduce(MPI_IN_PLACE, array, size, MPI_DOUBLE, MPI_MAX, comm_global);
}

vmg_int CommMPI::GlobalMax(const vmg_int& value)
{
  vmg_int result = value;
  MPI_Allreduce(MPI_IN_PLACE, &result, 1, MPI_INT, MPI_MAX, comm_global);
  return result;
}

vmg_int CommMPI::GlobalMaxRoot(const vmg_int& value)
{
  vmg_int recv_buffer = value;
  vmg_int send_buffer = value;
  MPI_Reduce(&send_buffer, &recv_buffer, 1, MPI_INT, MPI_MAX, 0, comm_global);
  return recv_buffer;
}

void CommMPI::GlobalMaxArray(vmg_int* array, const vmg_int& size)
{
  MPI_Allreduce(MPI_IN_PLACE, array, size, MPI_INT, MPI_MAX, comm_global);
}

void CommMPI::GlobalBroadcast(vmg_int& value)
{
  MPI_Bcast(&value, 1, MPI_INT, 0, comm_global);
}

void CommMPI::GlobalGather(vmg_int& value, vmg_int* array)
{
  MPI_Gather(&value, 1, MPI_INT, array, 1, MPI_INT, 0, comm_global);
}

void CommMPI::GlobalBroadcast(char* str)
{
  MPI_Bcast(str, std::strlen(str)+1, MPI_CHAR, 0, comm_global);
}

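/*
 * Level-local reductions: these reduce only over the communicator attached
 * to the given grid level, which may exclude processes that hold no data on
 * coarse levels. They must therefore only be called from processes that
 * participate in the level; the asserts guard against misuse.
 */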
vmg_float CommMPI::LevelSum(const Grid& grid, const vmg_float& value)
{
  vmg_float result = value;
  MPI_Comm comm = settings.CommunicatorLocal(grid);
  assert(comm != MPI_COMM_NULL);
  MPI_Allreduce(MPI_IN_PLACE, &result, 1, MPI_DOUBLE, MPI_SUM, comm);
  return result;
}

vmg_float CommMPI::LevelSumRoot(const Grid& grid, const vmg_float& value)
{
  vmg_float recv_buffer = value;
  vmg_float send_buffer = value;
  MPI_Comm comm = settings.CommunicatorLocal(grid);
  assert(comm != MPI_COMM_NULL);
  MPI_Reduce(&send_buffer, &recv_buffer, 1, MPI_DOUBLE, MPI_SUM, 0, comm);
  return recv_buffer;
}

void CommMPI::LevelSumArray(const Grid& grid, vmg_float* array, const vmg_int& size)
{
  MPI_Comm comm = settings.CommunicatorLocal(grid);
  assert(comm != MPI_COMM_NULL);
  MPI_Allreduce(MPI_IN_PLACE, array, size, MPI_DOUBLE, MPI_SUM, comm);
}

vmg_int CommMPI::LevelSum(const Grid& grid, const vmg_int& value)
{
  vmg_int result = value;
  MPI_Comm comm = settings.CommunicatorLocal(grid);
  assert(comm != MPI_COMM_NULL);
  MPI_Allreduce(MPI_IN_PLACE, &result, 1, MPI_INT, MPI_SUM, comm);
  return result;
}

vmg_int CommMPI::LevelSumRoot(const Grid& grid, const vmg_int& value)
{
  vmg_int recv_buffer = value;
  vmg_int send_buffer = value;
  MPI_Comm comm = settings.CommunicatorLocal(grid);
  assert(comm != MPI_COMM_NULL);
  MPI_Reduce(&send_buffer, &recv_buffer, 1, MPI_INT, MPI_SUM, 0, comm);
  return recv_buffer;
}

void CommMPI::LevelSumArray(const Grid& grid, vmg_int* array, const vmg_int& size)
{
  MPI_Comm comm = settings.CommunicatorLocal(grid);
  assert(comm != MPI_COMM_NULL);
  MPI_Allreduce(MPI_IN_PLACE, array, size, MPI_INT, MPI_SUM, comm);
}

void CommMPI::PrintString(const char* format, ...)
{
  va_list args;
  va_start(args, format);
  // vsnprintf instead of vsprintf: guard against overflowing print_buffer.
  vsnprintf(print_buffer, sizeof(print_buffer), format, args);
  printf("VMG: Rank %d: %s\n", GlobalRank(), print_buffer);
  va_end(args);
}

void CommMPI::PrintStringOnce(const char* format, ...)
{
  if (GlobalRank() == 0) {
    va_list args;
    va_start(args, format);
    vsnprintf(print_buffer, sizeof(print_buffer), format, args);
    printf("VMG: Rank %d: %s\n", GlobalRank(), print_buffer);
    va_end(args);
  }
}

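/*
 * XML output via MPI-IO. PrintXML writes a file from the calling process
 * alone (MPI_COMM_SELF); PrintXMLAll is collective: rank 0 contributes a
 * small header describing the number of processes through a shared-pointer
 * write, then all ranks append their data in rank order with
 * MPI_File_write_ordered.
 */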
void CommMPI::PrintXML(const std::string& filename, const std::string& xml_data)
{
  MPI_File file;
  std::stringstream path, xml_header;

  pugi::xml_document doc;
  pugi::xml_node node_data = doc.append_child("Global").append_child("NumProcesses").append_child(pugi::node_pcdata);
  node_data.set_value(Helper::ToString(GlobalProcs().Product()).c_str());
  doc.save(xml_header);

  path << OutputPath() << filename;

  char* filename_array = Helper::GetCharArray(path.str());
  char* xml_header_array = Helper::GetCharArray(xml_header.str());
  char* str_array = Helper::GetCharArray(xml_data);

  MPI_File_open(MPI_COMM_SELF, filename_array, MPI_MODE_WRONLY|MPI_MODE_CREATE, MPI_INFO_NULL, &file);
  MPI_File_set_size(file, 0);
  MPI_File_write(file, xml_header_array, xml_header.str().size(), MPI_CHAR, MPI_STATUS_IGNORE);
  MPI_File_write(file, str_array, xml_data.size(), MPI_CHAR, MPI_STATUS_IGNORE);
  MPI_File_close(&file);

  delete [] filename_array;
  delete [] xml_header_array;
  delete [] str_array;
}

void CommMPI::PrintXMLAll(const std::string& filename, const std::string& xml_data)
{
  MPI_File file;
  std::stringstream path;

  path << OutputPath() << filename;

  char* filename_array = Helper::GetCharArray(path.str());
  char* str_array = Helper::GetCharArray(xml_data);

  MPI_File_open(comm_global, filename_array, MPI_MODE_WRONLY|MPI_MODE_CREATE, MPI_INFO_NULL, &file);
  MPI_File_set_size(file, 0);

  if (GlobalRank() == 0) {
    std::stringstream xml_header;
    pugi::xml_document doc;
    pugi::xml_node node_data = doc.append_child("Global").append_child("NumProcesses").append_child(pugi::node_pcdata);
    node_data.set_value(Helper::ToString(GlobalProcs().Product()).c_str());
    doc.save(xml_header);

    char* xml_header_array = Helper::GetCharArray(xml_header.str());

    MPI_File_write_shared(file, xml_header_array, xml_header.str().size(), MPI_CHAR, MPI_STATUS_IGNORE);

    delete [] xml_header_array;
  }

  MPI_File_write_ordered(file, str_array, xml_data.size(), MPI_CHAR, MPI_STATUS_IGNORE);
  MPI_File_close(&file);

  delete [] filename_array;
  delete [] str_array;
}

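/*
 * Dumps decomposition and extent information for one grid level, appending
 * to the given file: rank 0 writes the global header, then every rank adds
 * its local block in rank order via MPI_File_write_ordered.
 */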
void CommMPI::PrintGridInformation(const Grid& grid, char* filename, const std::string& name)
{
  std::stringstream buf;
  MPI_File file;
  int rank, size;
  int size_local, size_local_max;

  MPI_Comm comm = settings.CommunicatorGlobal(grid);
  MPI_Comm comm_local = settings.CommunicatorLocal(grid);

  if (comm_local != MPI_COMM_NULL)
    MPI_Comm_size(comm_local, &size_local);
  else
    size_local = 0;

  if (comm != MPI_COMM_NULL) {

    MPI_Reduce(&size_local, &size_local_max, 1, MPI_INT, MPI_MAX, 0, comm);

    MPI_File_open(comm, filename, MPI_MODE_WRONLY|MPI_MODE_CREATE|MPI_MODE_APPEND, MPI_INFO_NULL, &file);

    MPI_Comm_rank(comm, &rank);
    MPI_Comm_size(comm, &size);

    if (rank == 0) {

      buf << "###########################################################" << std::endl
          << "GLOBAL INFORMATION:" << std::endl
          << " NAME: " << name << std::endl
          << " LEVEL: " << grid.Level() << std::endl
          << " COMM_SIZE_GLOBAL: " << size << std::endl
          << " COMM_SIZE_LOCAL: " << size_local_max << std::endl
          << " GLOBAL:" << std::endl
          << " GLOBAL_FINER_BEGIN: " << grid.Global().GlobalFinerBegin() << std::endl
          << " GLOBAL_FINER_END: " << grid.Global().GlobalFinerEnd() << std::endl
          << " GLOBAL_FINER_SIZE: " << grid.Global().GlobalFinerSize() << std::endl
          << " FINEST_ABS_BEGIN: " << grid.Global().FinestAbsBegin() << std::endl
          << " FINEST_ABS_END: " << grid.Global().FinestAbsEnd() << std::endl
          << " FINEST_ABS_SIZE: " << grid.Global().FinestAbsSize() << std::endl
          << " GLOBAL_SIZE: " << grid.Global().GlobalSize() << std::endl
          << " EXTENT:" << std::endl
          << " BEGIN: " << grid.Extent().Begin() << std::endl
          << " END: " << grid.Extent().End() << std::endl
          << " SIZE: " << grid.Extent().Size() << std::endl
          << " MESH_WIDTH: " << grid.Extent().MeshWidth() << std::endl
          << std::endl
          << "LOCAL INFORMATION:" << std::endl;
    }

    buf << "RANK " << rank << ":" << std::endl
        << " GLOBAL:" << std::endl
        << " LOCAL_BEGIN: " << grid.Global().LocalBegin() << std::endl
        << " LOCAL_END: " << grid.Global().LocalEnd() << std::endl
        << " LOCAL_SIZE: " << grid.Global().LocalSize() << std::endl
        << " LOCAL_FINER_BEGIN: " << grid.Global().LocalFinerBegin() << std::endl
        << " LOCAL_FINER_END: " << grid.Global().LocalFinerEnd() << std::endl
        << " LOCAL_FINER_SIZE: " << grid.Global().LocalFinerSize() << std::endl
        << " BOUNDARY_TYPE: " << grid.Global().BoundaryType() << std::endl
        << " LOCAL:" << std::endl
        << " BEGIN: " << grid.Local().Begin() << std::endl
        << " END: " << grid.Local().End() << std::endl
        << " SIZE: " << grid.Local().Size() << std::endl
        << " SIZE_TOTAL: " << grid.Local().SizeTotal() << std::endl
        << " HALO_BEGIN_1: " << grid.Local().HaloBegin1() << std::endl
        << " HALO_END_1: " << grid.Local().HaloEnd1() << std::endl
        << " HALO_SIZE_1: " << grid.Local().HaloSize1() << std::endl
        << " HALO_BEGIN_2: " << grid.Local().HaloBegin2() << std::endl
        << " HALO_END_2: " << grid.Local().HaloEnd2() << std::endl
        << " HALO_SIZE_2: " << grid.Local().HaloSize2() << std::endl
        << " BOUNDARY_BEGIN_1: " << grid.Local().BoundaryBegin1() << std::endl
        << " BOUNDARY_END_1: " << grid.Local().BoundaryEnd1() << std::endl
        << " BOUNDARY_SIZE_1: " << grid.Local().BoundarySize1() << std::endl
        << " BOUNDARY_BEGIN_2: " << grid.Local().BoundaryBegin2() << std::endl
        << " BOUNDARY_END_2: " << grid.Local().BoundaryEnd2() << std::endl
        << " BOUNDARY_SIZE_2: " << grid.Local().BoundarySize2() << std::endl
        << " FINER_BEGIN: " << grid.Local().FinerBegin() << std::endl
        << " FINER_END: " << grid.Local().FinerEnd() << std::endl
        << " FINER_SIZE: " << grid.Local().FinerSize() << std::endl;

    if (rank == size-1)
      buf << "###########################################################" << std::endl;

    char* char_buf = Helper::GetCharArray(buf.str());
    MPI_File_write_ordered(file, char_buf, buf.str().size(), MPI_CHAR, MPI_STATUS_IGNORE);
    delete [] char_buf;

    MPI_File_close(&file);

  }
}

void CommMPI::PrintAllSettings()
{
  std::stringstream buf;
  MPI_File file;

  const Multigrid& mg = *MG::GetSol();

  buf << OutputPath() << "settings.txt";
  char* filename = Helper::GetCharArray(buf.str());

  MPI_File_open(comm_global, filename, MPI_MODE_WRONLY|MPI_MODE_CREATE, MPI_INFO_NULL, &file);
  MPI_File_set_size(file, 0);
  MPI_File_close(&file);

  for (int i=mg.MinLevel(); i<=mg.MaxLevel(); ++i)
    PrintGridInformation(mg(i), filename, "MULTIGRID");

  for (int i=mg.MinLevel()+1; i<=mg.MaxLevel(); ++i)
    PrintGridInformation(settings.CoarserGrid(mg(i)), filename, "COARSER_GRID");

  for (int i=mg.MinLevel(); i<mg.MaxLevel(); ++i)
    PrintGridInformation(settings.FinerGrid(mg(i)), filename, "FINER_GRID");

  delete [] filename;
}

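/*
 * Grid output for debugging/visualization. With VTK support each process
 * writes its local block as a .vti image; without VTK a plain-text fallback
 * first refreshes the ghost layer, streams the values, and delegates file
 * creation to CreateOutputFiles().
 */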
void CommMPI::PrintGrid(Grid& grid, const char* information)
{
  int output_count = OutputCount();

#ifdef HAVE_VTK

  if (settings.CommunicatorLocal(grid) != MPI_COMM_NULL) {

    Index end, end_global;

    for (int i=0; i<3; ++i) {
      end[i] = grid.Local().End()[i];
      end_global[i] = grid.Global().LocalEnd()[i];
    }

    vtkSmartPointer<vtkImageData> image = vtkSmartPointer<vtkImageData>::New();
    image->SetExtent(grid.Global().LocalBegin().X(), end_global.X()-1,
                     grid.Global().LocalBegin().Y(), end_global.Y()-1,
                     grid.Global().LocalBegin().Z(), end_global.Z()-1);
    image->SetSpacing(grid.Extent().MeshWidth().vec());
    image->SetOrigin(grid.Extent().Begin().vec());
    image->SetScalarTypeToDouble();
    image->SetNumberOfScalarComponents(1);
    image->AllocateScalars();
    image->GetPointData()->GetScalars()->SetName(information);

    Index i;
    for (i.X()=grid.Local().Begin().X(); i.X()<end.X(); ++i.X())
      for (i.Y()=grid.Local().Begin().Y(); i.Y()<end.Y(); ++i.Y())
        for (i.Z()=grid.Local().Begin().Z(); i.Z()<end.Z(); ++i.Z())
          image->SetScalarComponentFromDouble(i.X() - grid.Local().Begin().X() + grid.Global().LocalBegin().X(),
                                              i.Y() - grid.Local().Begin().Y() + grid.Global().LocalBegin().Y(),
                                              i.Z() - grid.Local().Begin().Z() + grid.Global().LocalBegin().Z(),
                                              0, grid.GetVal(i));

    image->Update();

    int rank, size;
    MPI_Comm_rank(comm_global, &rank);
    MPI_Comm_size(comm_global, &size);

    std::stringstream filename;
    filename << OutputPath() << std::setw(4) << std::setfill('0') << output_count << "_" << rank << ".vti";

    vtkSmartPointer<vtkXMLImageDataWriter> writer = vtkSmartPointer<vtkXMLImageDataWriter>::New();
    writer->SetFileName(filename.str().c_str());
    writer->SetCompressorTypeToNone();
    writer->SetDataModeToAscii();
    writer->SetInput(image);
    writer->Update();
    writer->Write();

  }

#else /* HAVE_VTK */
  Index i;
  std::stringstream buf;

  Index begin, end;
  Index begin_local, end_local, begin_global, end_global;

  CommToGhosts(grid);

  for (int i=0; i<3; ++i) {
    end[i] = grid.Local().End()[i] + (grid.Global().LocalEnd()[i] == grid.Global().GlobalSize()[i] ? 0 : grid.Local().HaloSize1()[i]);
    end_local[i] = grid.Global().LocalEnd()[i] - (grid.Global().LocalEnd()[i] == grid.Global().GlobalSize()[i] ? 1 : 0);
  }

  begin = grid.Local().Begin();
  begin_local = grid.Global().LocalBegin();
  begin_global = 0;
  end_global = grid.Global().GlobalSize()-1;

  for (i.Z()=begin.Z(); i.Z()<end.Z(); ++i.Z())
    for (i.Y()=begin.Y(); i.Y()<end.Y(); ++i.Y())
      for (i.X()=begin.X(); i.X()<end.X(); ++i.X())
        buf << std::scientific << grid.GetVal(i) << " ";

  CreateOutputFiles(grid, buf, information,
                    begin_global, end_global,
                    begin_local, end_local,
                    output_count);
#endif /* HAVE_VTK */
}

void CommMPI::PrintDefect(Grid& sol, Grid& rhs, const char* information)
{
  TempGrid *temp = MG::GetTempGrid();
  temp->SetProperties(sol);
  temp->ImportFromResidual(sol, rhs);
  PrintGrid(*temp, information);
}

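/*
 * Hand-written VTK output path (used when the VTK library is unavailable):
 * a master .pvti file describes the global extents and references one
 * serial .vti piece per rank; each rank then writes its own piece. The
 * master file is assembled with shared/ordered MPI-IO writes so the header,
 * the per-rank <Piece> entries, and the footer appear in order.
 */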
void CommMPI::CreateOutputFiles(const Grid& grid, const std::stringstream& serial_data, const char* information,
                                const Index& begin_global, const Index& end_global,
                                const Index& begin_local, const Index& end_local,
                                const int& output_count)
{
  MPI_Comm comm = settings.CommunicatorGlobal(grid);

  if (comm != MPI_COMM_NULL) {

    MPI_File file;
    std::string conv_information = Helper::ReplaceWhitespaces(information, "_");

    CreateParallelOutputFile(grid, comm, output_count, conv_information.c_str(),
                             begin_global, end_global, begin_local, end_local);

    file = CreateSerialOutputFile(grid, comm, output_count, conv_information.c_str(),
                                  begin_global, end_global, begin_local, end_local);

    char* char_buf = Helper::GetCharArray(serial_data.str());
    MPI_File_write(file, char_buf, serial_data.str().size(), MPI_CHAR, MPI_STATUS_IGNORE);
    delete [] char_buf;

    FinalizeSerialOutputFile(file);

  }
}

void CommMPI::CreateParallelOutputFile(const Grid& grid, MPI_Comm& comm,
                                       const int& output_count, const char* information,
                                       const Index& begin_global, const Index& end_global,
                                       const Index& begin_local, const Index& end_local)
{
  int rank;
  MPI_File file;
  char parallel_filename[513], serial_filename[513];
  std::stringstream buf;

  MPI_Comm_rank(comm, &rank);

  // snprintf instead of sprintf: OutputPath() is not bounded a priori.
  snprintf(parallel_filename, sizeof(parallel_filename), "%s%04d.pvti", OutputPath().c_str(), output_count);
  snprintf(serial_filename, sizeof(serial_filename), "%04d_%d.vti", output_count, rank);

  MPI_File_open(comm, parallel_filename, MPI_MODE_WRONLY|MPI_MODE_CREATE, MPI_INFO_NULL, &file);
  MPI_File_set_size(file, 0);

  if (rank == 0) {

    buf << "<?xml version=\"1.0\"?>" << std::endl
        << "<VTKFile type=\"PImageData\" version=\"0.1\" byte_order=\"LittleEndian\">" << std::endl
        << "  <PImageData WholeExtent=\"";

    for (int i=0; i<3; ++i)
      buf << begin_global[i] << " " << end_global[i] << " ";

    buf << "\"" << std::endl
        << "    GhostLevel=\"0\" Origin=\"0 0 0\" Spacing=\"";

    for (int i=0; i<3; ++i)
      buf << grid.Extent().MeshWidth()[i] << " ";

    buf << "\">" << std::endl
        << "    <PPointData Scalars=\"" << information << "\">" << std::endl
        << "      <PDataArray type=\"Float32\" Name=\"" << information << "\"/>" << std::endl
        << "    </PPointData>" << std::endl;

    char* char_buf = Helper::GetCharArray(buf.str());

    MPI_File_write_shared(file, char_buf, buf.str().size(), MPI_CHAR, MPI_STATUS_IGNORE);

    delete [] char_buf;
  }

  buf.str("");

  if ((end_local-begin_local).Product() > 0) {
    buf << "    <Piece Extent=\"";

    for (int i=0; i<3; ++i)
      buf << begin_local[i] << " " << end_local[i] << " ";

    buf << "\" Source=\"" << serial_filename << "\"/>" << std::endl;
  }

  char* char_buf = Helper::GetCharArray(buf.str());

  MPI_File_write_ordered(file, char_buf, buf.str().size(), MPI_CHAR, MPI_STATUS_IGNORE);

  delete [] char_buf;

  if (rank == 0) {

    buf.str("");

    buf << "  </PImageData>" << std::endl
        << "</VTKFile>" << std::endl;

    char* char_buf = Helper::GetCharArray(buf.str());

    MPI_File_write_shared(file, char_buf, buf.str().size(), MPI_CHAR, MPI_STATUS_IGNORE);

    delete [] char_buf;
  }

  MPI_File_close(&file);
}

MPI_File CommMPI::CreateSerialOutputFile(const Grid& grid, MPI_Comm& comm,
                                         const int& output_count, const char* information,
                                         const Index& begin_global, const Index& end_global,
                                         const Index& begin_local, const Index& end_local)
{
  char serial_filename[513];
  int rank;
  MPI_File file;
  std::stringstream buf;

  // Note: the rank used in the filename is taken from comm_global, while
  // CreateParallelOutputFile derives the referenced piece name from the
  // rank in comm. The two only match if both communicators order the
  // processes identically.
  MPI_Comm_rank(comm_global, &rank);

  snprintf(serial_filename, sizeof(serial_filename), "%s%04d_%d.vti", OutputPath().c_str(), output_count, rank);

  MPI_File_open(MPI_COMM_SELF, serial_filename, MPI_MODE_WRONLY|MPI_MODE_CREATE, MPI_INFO_NULL, &file);

  buf << "<?xml version=\"1.0\"?>" << std::endl
      << "<VTKFile type=\"ImageData\" version=\"0.1\" byte_order=\"LittleEndian\">" << std::endl
      << "  <ImageData WholeExtent=\"";

  for (int i=0; i<3; ++i)
    buf << begin_global[i] << " " << end_global[i] << " ";

  buf << "\"" << std::endl
      << "    Origin=\"0 0 0\" Spacing=\"";

  for (int i=0; i<3; ++i)
    buf << grid.Extent().MeshWidth()[i] << " ";

  buf << "\">" << std::endl
      << "    <Piece Extent=\"";

  for (int i=0; i<3; ++i)
    buf << begin_local[i] << " " << end_local[i] << " ";

  buf << "\">" << std::endl
      << "      <PointData Scalars=\"" << information << "\">" << std::endl
      << "        <DataArray type=\"Float32\" Name=\"" << information << "\" format=\"ascii\">" << std::endl
      << "          ";

  char* char_buf = Helper::GetCharArray(buf.str());
  MPI_File_write(file, char_buf, buf.str().size(), MPI_CHAR, MPI_STATUS_IGNORE);
  delete [] char_buf;

  return file;
}

void CommMPI::FinalizeSerialOutputFile(MPI_File& file)
{
  std::stringstream buf;

  buf << std::endl
      << "        </DataArray>" << std::endl
      << "      </PointData>" << std::endl
      << "    </Piece>" << std::endl
      << "  </ImageData>" << std::endl
      << "</VTKFile>" << std::endl;

  char* char_buf = Helper::GetCharArray(buf.str());
  MPI_File_write(file, char_buf, buf.str().size(), MPI_CHAR, MPI_STATUS_IGNORE);
  delete [] char_buf;

  MPI_File_close(&file);
}

int CommMPI::GlobalRank() const
{
  int rank;
  MPI_Comm_rank(comm_global, &rank);
  return rank;
}

int CommMPI::GlobalSize() const
{
  int size;
  MPI_Comm_size(comm_global, &size);
  return size;
}

Index CommMPI::GlobalPos() const
{
  Index dims, periods, coords;
  MPI_Cart_get(comm_global, 3, dims.vec(), periods.vec(), coords.vec());
  return coords;
}

Index CommMPI::GlobalProcs() const
{
  Index dims, periods, coords;
  MPI_Cart_get(comm_global, 3, dims.vec(), periods.vec(), coords.vec());
  return dims;
}

int CommMPI::Rank(const Grid& grid) const
{
  int rank;
  MPI_Comm comm = settings.CommunicatorLocal(grid);
  assert(comm != MPI_COMM_NULL);
  MPI_Comm_rank(comm, &rank);
  return rank;
}

int CommMPI::Size(const Grid& grid) const
{
  int size;
  MPI_Comm comm = settings.CommunicatorLocal(grid);
  assert(comm != MPI_COMM_NULL);
  MPI_Comm_size(comm, &size);
  return size;
}

Index CommMPI::Pos(const Grid& grid) const
{
  Index dims, periods, coords;
  MPI_Comm comm = settings.CommunicatorLocal(grid);
  assert(comm != MPI_COMM_NULL);
  MPI_Cart_get(comm, 3, dims.vec(), periods.vec(), coords.vec());
  return coords;
}

Index CommMPI::Procs(const Grid& grid) const
{
  Index dims, periods, coords;
  MPI_Comm comm = settings.CommunicatorLocal(grid);
  assert(comm != MPI_COMM_NULL);
  MPI_Cart_get(comm, 3, dims.vec(), periods.vec(), coords.vec());
  return dims;
}

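/*
 * Communicator setup. If the caller already passes a Cartesian communicator
 * it is used as-is; otherwise a 3d Cartesian topology is created. For
 * power-of-two process counts the dimensions are computed by hand so the
 * factors of two are split as evenly as possible (e.g. size = 32 gives
 * log2 = 5 and dims = 4 x 4 x 2); all other counts are left to
 * MPI_Dims_create. Boundary directions marked Periodic become periodic
 * Cartesian dimensions.
 */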
void CommMPI::InitCommMPI(const MPI_Comm& comm)
{
  int status, size, rank;
  int dims[3] = {0, 0, 0};
  int periods[3];

  for (int i=0; i<3; ++i)
    periods[i] = (BoundaryConditions()[i] == Periodic ? 1 : 0);

  MPI_Comm_size(comm, &size);
  MPI_Comm_rank(comm, &rank);

  MPI_Topo_test(comm, &status);

  if (status == MPI_CART) {

    comm_global = comm;

  } else {

    const int log2 = Helper::log_2(size);

    if (Helper::intpow(2, log2) == size) {
      for (int i=0; i<3; ++i)
        dims[i] = Helper::intpow(2, log2 / 3 + (log2%3 > i ? 1 : 0));
    } else {
      MPI_Dims_create(size, 3, dims);
    }

#ifdef DEBUG_OUTPUT
    if (rank == 0)
      std::printf("Dims: %d %d %d\n", dims[0], dims[1], dims[2]);
#endif

    MPI_Cart_create(comm, 3, dims, periods, 1, &comm_global);

  }

  MPI_Info_create(&info);
  char key[] = "no_locks";
  char val[] = "true";
  MPI_Info_set(info, key, val);
}

CommMPI::~CommMPI()
{
  MPI_Comm_free(&comm_global);
#ifdef VMG_ONE_SIDED
  if (win_created)
    MPI_Win_free(&win);
#endif
  MPI_Info_free(&info);
}

Grid& CommMPI::GetCoarserGrid(Multigrid& multigrid)
{
  return settings.CoarserGrid(multigrid(multigrid.Level()));
}

Grid& CommMPI::GetFinerGrid(Multigrid& multigrid)
{
  return settings.FinerGrid(multigrid(multigrid.Level()));
}

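/*
 * Creates a timestamped output directory (./output/YYYY_MM_DD_HH_MM_SS/,
 * with a numeric suffix appended until the name is unique). Only rank 0
 * touches the filesystem; the resulting path is then broadcast, length
 * first, so that all ranks return the same string. Without
 * Boost.Filesystem the current directory is used instead.
 */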
std::string CommMPI::CreateOutputDirectory()
{
#ifdef HAVE_BOOST_FILESYSTEM
  std::string path, unique_path;
  std::stringstream unique_suffix;
  int suffix_counter = 0;
  char buffer[129];
  time_t rawtime;
  struct tm *timeinfo;
  int path_size;
  char* path_array;

  if (GlobalRank() == 0) {

    time(&rawtime);
    timeinfo = localtime(&rawtime);
    strftime(buffer, 128, "./output/%Y_%m_%d_%H_%M_%S/", timeinfo);
    path = buffer;

    if (!fs::exists("output"))
      fs::create_directory("output");

    unique_path = path;

    while (fs::exists(unique_path.c_str())) {

      unique_suffix.str("");
      unique_suffix << "_" << suffix_counter++ << "/";

      unique_path = path;
      unique_path.replace(unique_path.size()-1, 1, unique_suffix.str());

    }

    fs::create_directory(unique_path.c_str());

    path_size = unique_path.size() + 1;
    path_array = Helper::GetCharArray(unique_path);

    MPI_Bcast(&path_size, 1, MPI_INT, 0, comm_global);

  } else {

    MPI_Bcast(&path_size, 1, MPI_INT, 0, comm_global);
    path_array = new char[path_size];

  }

  MPI_Bcast(path_array, path_size, MPI_CHAR, 0, comm_global);

  unique_path = path_array;

  delete [] path_array;

  return unique_path;

#else

  return "./";

#endif
}


#endif /* HAVE_MPI */