/*
 * vmg - a versatile multigrid solver
 * Copyright (C) 2012 Institute for Numerical Simulation, University of Bonn
 *
 * vmg is free software: you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation, either version 3 of the License, or
 * (at your option) any later version.
 *
 * vmg is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program.  If not, see <http://www.gnu.org/licenses/>.
 */

/**
 * @file   comm_mpi.cpp
 * @author Julian Iseringhausen <isering@ins.uni-bonn.de>
 * @date   Wed Jun 16 13:21:06 2010
 *
 * @brief  Class for MPI-based communication.
 *
 */

#ifdef HAVE_CONFIG_H
#include <config.h>
#endif

#ifdef HAVE_MPI

#include <mpi.h>
#ifdef HAVE_MARMOT
#include <enhancempicalls.h>
#include <sourceinfompicalls.h>
#endif

#ifdef HAVE_BOOST_FILESYSTEM
#include <boost/filesystem.hpp>
namespace fs = boost::filesystem;
#endif

#ifdef HAVE_VTK
#include <vtkAbstractArray.h>
#include <vtkImageData.h>
#include <vtkPointData.h>
#include <vtkSmartPointer.h>
#include <vtkXMLImageDataWriter.h>
#endif

#include <cassert>
#include <cstdarg>
#include <cstdio>
#include <cstring>
#include <ctime>
#include <iomanip>
#include <sstream>

#include "base/helper.hpp"
#include "base/tuple.hpp"
#include "comm/comm_mpi.hpp"
#include "comm/mpi/datatypes_local.hpp"
#include "grid/grid.hpp"
#include "grid/multigrid.hpp"
#include "grid/tempgrid.hpp"
#include "mg.hpp"
#include "base/timer.hpp"

static char print_buffer[512];

using namespace VMG;

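/*
 * The helpers below post one non-blocking MPI operation per datatype in the
 * given vector. Each call to Request() is assumed to hand out a fresh
 * MPI_Request slot tracked by the communicator, so the operations started
 * here are completed later, collectively, via WaitAll() or polled via
 * TestAll().
 */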
void CommMPI::IsendAll(Grid& grid, std::vector<VMG::MPI::Datatype>& types, const MPI_Comm& comm, const int& tag_start)
{
  for (std::vector<VMG::MPI::Datatype>::const_iterator iter=types.begin(); iter!=types.end(); ++iter)
    iter->Isend(grid, tag_start, comm, Request());
}

void CommMPI::IrecvAll(Grid& grid, std::vector<VMG::MPI::Datatype>& types, const MPI_Comm& comm, const int& tag_start)
{
  for (std::vector<VMG::MPI::Datatype>::const_iterator iter=types.begin(); iter!=types.end(); ++iter)
    iter->Irecv(grid, tag_start, comm, Request());
}

void CommMPI::IsendAllBuffered(const Grid& grid, std::vector<VMG::MPI::Datatype>& types, const MPI_Comm& comm, const int& tag_start)
{
  for (std::vector<VMG::MPI::Datatype>::iterator iter=types.begin(); iter!=types.end(); ++iter)
    iter->IsendBuffered(grid, tag_start, comm, Request());
}

void CommMPI::IrecvAllBuffered(std::vector<VMG::MPI::Datatype>& types, const MPI_Comm& comm, const int& tag_start)
{
  for (std::vector<VMG::MPI::Datatype>::iterator iter=types.begin(); iter!=types.end(); ++iter)
    iter->IrecvBuffered(tag_start, comm, Request());
}

void CommMPI::ReplaceBufferAll(Grid& grid, const std::vector<VMG::MPI::Datatype>& types)
{
  for (std::vector<VMG::MPI::Datatype>::const_iterator iter=types.begin(); iter!=types.end(); ++iter)
    iter->GridReplace(grid);
}

void CommMPI::AddBufferAll(Grid& grid, const std::vector<VMG::MPI::Datatype>& types)
{
  for (std::vector<VMG::MPI::Datatype>::const_iterator iter=types.begin(); iter!=types.end(); ++iter)
    iter->GridSum(grid);
}

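/*
 * Grid-to-grid transfers: receives are posted before the matching sends,
 * the exchange blocks in WaitAll(), and the receive buffers are then either
 * copied (CommSubgrid) or accumulated (CommAddSubgrid) into the target grid.
 * The integer tags used in this file (0411, 1806, 2310, ...) only need to be
 * distinct per exchange; note that 0411 is an octal literal, and that the
 * 0xc0ffee tag used further below exceeds the minimum MPI_TAG_UB of 32767
 * guaranteed by the MPI standard, so it is not strictly portable.
 */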
void CommMPI::CommSubgrid(Grid& grid_old, Grid& grid_new, const int& direction)
{
  MPI_Comm comm = settings.CommunicatorGlobal(grid_old);
  if (comm != MPI_COMM_NULL) {
    VMG::MPI::DatatypesGlobal& datatypes = settings.DatatypesGlobal(grid_old, grid_new, direction);
    IrecvAllBuffered(datatypes.Receive(), comm, 0411);
    IsendAllBuffered(grid_old, datatypes.Send(), comm, 0411);
    WaitAll();
    ReplaceBufferAll(grid_new, datatypes.Receive());
  }
}

void CommMPI::CommAddSubgrid(Grid& grid_old, Grid& grid_new, const int& direction)
{
  MPI_Comm comm = settings.CommunicatorGlobal(grid_old);
  if (comm != MPI_COMM_NULL) {
    VMG::MPI::DatatypesGlobal& datatypes = settings.DatatypesGlobal(grid_old, grid_new, direction);
    IrecvAllBuffered(datatypes.Receive(), comm, 1806);
    IsendAllBuffered(grid_old, datatypes.Send(), comm, 1806);
    WaitAll();
    AddBufferAll(grid_new, datatypes.Receive());
  }
}

void CommMPI::CommToGhosts(Grid& grid)
{
  MPI_Comm comm = settings.CommunicatorLocal(grid);
  if (comm != MPI_COMM_NULL) {
    VMG::MPI::DatatypesLocal& types = settings.DatatypesLocal(grid);
    IrecvAllBuffered(types.Halo(), comm, 2310);
    IsendAllBuffered(grid, types.NB(), comm, 2310);
    WaitAll();
    ReplaceBufferAll(grid, types.Halo());
  }
}

void CommMPI::CommToGhostsAsyncStart(Grid& grid)
{
  MPI_Comm comm = settings.CommunicatorLocal(grid);
  if (comm != MPI_COMM_NULL) {
    VMG::MPI::DatatypesLocal& types = settings.DatatypesLocal(grid);
    IrecvAllBuffered(types.Halo(), comm, 2412);
    IsendAllBuffered(grid, types.NB(), comm, 2412);
    TestAll();
  }
}

void CommMPI::CommToGhostsAsyncFinish(Grid& grid)
{
  WaitAll();
  ReplaceBufferAll(grid, settings.DatatypesLocal(grid).Halo());
}

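/*
 * The Async variants split the halo exchange so that computation can overlap
 * communication. A minimal usage sketch (hypothetical caller; smooth_interior
 * and smooth_boundary are illustrative names, not part of vmg):
 *
 *   comm.CommToGhostsAsyncStart(grid);  // post Irecv/Isend, TestAll() nudges progress
 *   smooth_interior(grid);              // work that needs no ghost values
 *   comm.CommToGhostsAsyncFinish(grid); // WaitAll(), then unpack halo buffers
 *   smooth_boundary(grid);              // work that depends on fresh ghosts
 */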
void CommMPI::CommFromGhosts(Grid& grid)
{
  MPI_Comm comm = settings.CommunicatorLocal(grid);
  if (comm != MPI_COMM_NULL) {
    VMG::MPI::DatatypesLocal& types = settings.DatatypesLocal(grid);
    IrecvAllBuffered(types.NB(), comm, 1337);
    IsendAllBuffered(grid, types.Halo(), comm, 1337);
    WaitAll();
    AddBufferAll(grid, types.NB());
  }
}

void CommMPI::CommFromGhostsAsyncStart(Grid& grid)
{
  MPI_Comm comm = settings.CommunicatorLocal(grid);
  if (comm != MPI_COMM_NULL) {
    VMG::MPI::DatatypesLocal& types = settings.DatatypesLocal(grid);
    IrecvAllBuffered(types.NB(), comm, 0xc0ffee);
    IsendAllBuffered(grid, types.Halo(), comm, 0xc0ffee);
    TestAll();
  }
}

void CommMPI::CommFromGhostsAsyncFinish(Grid& grid)
{
  WaitAll();
  AddBufferAll(grid, settings.DatatypesLocal(grid).NB());
}

void CommMPI::Barrier()
{
  MPI_Barrier(comm_global);
}

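/*
 * Global reductions over comm_global. The wrappers below hard-code
 * MPI_DOUBLE and MPI_INT, i.e. they assume that vmg_float is double and
 * vmg_int is int; a change to those typedefs would have to be mirrored here.
 */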
vmg_float CommMPI::GlobalSum(vmg_float value)
{
  vmg_float result = 0.0;
  MPI_Allreduce(&value, &result, 1, MPI_DOUBLE, MPI_SUM, comm_global);
  return result;
}

vmg_float CommMPI::GlobalSumRoot(vmg_float value)
{
  vmg_float result = 0.0;
  MPI_Reduce(&value, &result, 1, MPI_DOUBLE, MPI_SUM, 0, comm_global);
  return result;
}

void CommMPI::GlobalSumArray(vmg_float* array, const vmg_int& size)
{
  MPI_Allreduce(MPI_IN_PLACE, array, size, MPI_DOUBLE, MPI_SUM, comm_global);
}

void CommMPI::GlobalBroadcast(vmg_float& value)
{
  MPI_Bcast(&value, 1, MPI_DOUBLE, 0, comm_global);
}

void CommMPI::GlobalGather(vmg_float& value, vmg_float* array)
{
  MPI_Gather(&value, 1, MPI_DOUBLE, array, 1, MPI_DOUBLE, 0, comm_global);
}

vmg_int CommMPI::GlobalSum(vmg_int value)
{
  vmg_int result = 0;
  MPI_Allreduce(&value, &result, 1, MPI_INT, MPI_SUM, comm_global);
  return result;
}

vmg_int CommMPI::GlobalSumRoot(vmg_int value)
{
  vmg_int result = 0;
  MPI_Reduce(&value, &result, 1, MPI_INT, MPI_SUM, 0, comm_global);
  return result;
}

void CommMPI::GlobalSumArray(vmg_int* array, const vmg_int& size)
{
  MPI_Allreduce(MPI_IN_PLACE, array, size, MPI_INT, MPI_SUM, comm_global);
}

vmg_float CommMPI::GlobalMax(vmg_float value)
{
  vmg_float result = 0.0;
  MPI_Allreduce(&value, &result, 1, MPI_DOUBLE, MPI_MAX, comm_global);
  return result;
}

vmg_float CommMPI::GlobalMaxRoot(vmg_float value)
{
  vmg_float result = 0.0;
  MPI_Reduce(&value, &result, 1, MPI_DOUBLE, MPI_MAX, 0, comm_global);
  return result;
}

void CommMPI::GlobalMaxArray(vmg_float* array, const vmg_int& size)
{
  MPI_Allreduce(MPI_IN_PLACE, array, size, MPI_DOUBLE, MPI_MAX, comm_global);
}

vmg_int CommMPI::GlobalMax(vmg_int value)
{
  vmg_int result = 0;
  MPI_Allreduce(&value, &result, 1, MPI_INT, MPI_MAX, comm_global);
  return result;
}

vmg_int CommMPI::GlobalMaxRoot(vmg_int value)
{
  vmg_int result = 0;
  MPI_Reduce(&value, &result, 1, MPI_INT, MPI_MAX, 0, comm_global);
  return result;
}

void CommMPI::GlobalMaxArray(vmg_int* array, const vmg_int& size)
{
  MPI_Allreduce(MPI_IN_PLACE, array, size, MPI_INT, MPI_MAX, comm_global);
}

void CommMPI::GlobalBroadcast(vmg_int& value)
{
  MPI_Bcast(&value, 1, MPI_INT, 0, comm_global);
}

void CommMPI::GlobalGather(vmg_int& value, vmg_int* array)
{
  MPI_Gather(&value, 1, MPI_INT, array, 1, MPI_INT, 0, comm_global);
}

void CommMPI::GlobalBroadcast(char* str)
{
  int size = std::strlen(str) + 1;
  MPI_Bcast(&size, 1, MPI_INT, 0, comm_global);
  MPI_Bcast(str, size, MPI_CHAR, 0, comm_global);
}

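/*
 * Level-local reductions: identical to the global wrappers, but restricted
 * to the communicator of the grid's own level. The asserts document that
 * every rank calling in must be a member of that level's communicator.
 */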
vmg_float CommMPI::LevelSum(const Grid& grid, vmg_float value)
{
  vmg_float result = 0.0;
  MPI_Comm comm = settings.CommunicatorLocal(grid);
  assert(comm != MPI_COMM_NULL);
  MPI_Allreduce(&value, &result, 1, MPI_DOUBLE, MPI_SUM, comm);
  return result;
}

vmg_float CommMPI::LevelSumRoot(const Grid& grid, vmg_float value)
{
  vmg_float result = 0.0;
  MPI_Comm comm = settings.CommunicatorLocal(grid);
  assert(comm != MPI_COMM_NULL);
  MPI_Reduce(&value, &result, 1, MPI_DOUBLE, MPI_SUM, 0, comm);
  return result;
}

void CommMPI::LevelSumArray(const Grid& grid, vmg_float* array, const vmg_int& size)
{
  MPI_Comm comm = settings.CommunicatorLocal(grid);
  assert(comm != MPI_COMM_NULL);
  MPI_Allreduce(MPI_IN_PLACE, array, size, MPI_DOUBLE, MPI_SUM, comm);
}

vmg_int CommMPI::LevelSum(const Grid& grid, vmg_int value)
{
  vmg_int result = 0;
  MPI_Comm comm = settings.CommunicatorLocal(grid);
  assert(comm != MPI_COMM_NULL);
  MPI_Allreduce(&value, &result, 1, MPI_INT, MPI_SUM, comm);
  return result;
}

vmg_int CommMPI::LevelSumRoot(const Grid& grid, vmg_int value)
{
  vmg_int result = 0;
  MPI_Comm comm = settings.CommunicatorLocal(grid);
  assert(comm != MPI_COMM_NULL);
  MPI_Reduce(&value, &result, 1, MPI_INT, MPI_SUM, 0, comm);
  return result;
}

void CommMPI::LevelSumArray(const Grid& grid, vmg_int* array, const vmg_int& size)
{
  MPI_Comm comm = settings.CommunicatorLocal(grid);
  assert(comm != MPI_COMM_NULL);
  MPI_Allreduce(MPI_IN_PLACE, array, size, MPI_INT, MPI_SUM, comm);
}

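/*
 * Output, Info, Debug and Timing messages are filtered at compile time via
 * the OUTPUT_INFO/OUTPUT_DEBUG/OUTPUT_TIMING defines; only the Output level
 * is printed unconditionally.
 */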
//TODO: Variable length buffer
void CommMPI::Print(const OutputLevel level, const char* format, ...)
{
  bool print = (level == Output);

#ifdef OUTPUT_INFO
  print |= (level == Info);
#endif
#ifdef OUTPUT_DEBUG
  print |= (level == Debug);
#endif
#ifdef OUTPUT_TIMING
  print |= (level == Timing);
#endif

  if (print) {
    va_list args;
    va_start(args, format);
    // vsnprintf truncates instead of overflowing the fixed-size buffer.
    vsnprintf(print_buffer, sizeof(print_buffer), format, args);
    printf("vmg: Rank %d: %s\n", GlobalRank(), print_buffer);
    va_end(args);
  }
}

void CommMPI::PrintOnce(const OutputLevel level, const char* format, ...)
{
  bool print = (level == Output);

#ifdef OUTPUT_INFO
  print |= (level == Info);
#endif
#ifdef OUTPUT_DEBUG
  print |= (level == Debug);
#endif
#ifdef OUTPUT_TIMING
  print |= (level == Timing);
#endif

  if (GlobalRank() == 0 && print) {
    va_list args;
    va_start(args, format);
    vsnprintf(print_buffer, sizeof(print_buffer), format, args);
    printf("vmg: Rank %d: %s\n", GlobalRank(), print_buffer);
    va_end(args);
  }
}

void CommMPI::PrintXML(const std::string& filename, const std::string& xml_data)
{
  MPI_File file;
  std::stringstream path, xml_header;

  pugi::xml_document doc;
  pugi::xml_node node_data = doc.append_child("Global").append_child("NumProcesses").append_child(pugi::node_pcdata);
  node_data.set_value(Helper::ToString(GlobalProcs().Product()).c_str());
  doc.save(xml_header);

  path << OutputPath() << filename;

  char* filename_array = Helper::GetCharArray(path.str());
  char* xml_header_array = Helper::GetCharArray(xml_header.str());
  char* str_array = Helper::GetCharArray(xml_data);

  MPI_File_open(MPI_COMM_SELF, filename_array, MPI_MODE_WRONLY|MPI_MODE_CREATE, MPI_INFO_NULL, &file);
  MPI_File_set_size(file, 0);
  MPI_File_write(file, xml_header_array, xml_header.str().size(), MPI_CHAR, MPI_STATUS_IGNORE);
  MPI_File_write(file, str_array, xml_data.size(), MPI_CHAR, MPI_STATUS_IGNORE);
  MPI_File_close(&file);

  delete [] filename_array;
  delete [] xml_header_array;
  delete [] str_array;
}

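/*
 * PrintXMLAll writes one shared file from all ranks: rank 0 emits the XML
 * header through the shared file pointer, then every rank appends its
 * payload in rank order via MPI_File_write_ordered. This relies on the
 * header write completing before the ordered write starts consuming the
 * shared file pointer.
 */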
void CommMPI::PrintXMLAll(const std::string& filename, const std::string& xml_data)
{
  MPI_File file;
  std::stringstream path;

  path << OutputPath() << filename;

  char* filename_array = Helper::GetCharArray(path.str());
  char* str_array = Helper::GetCharArray(xml_data);

  MPI_File_open(comm_global, filename_array, MPI_MODE_WRONLY|MPI_MODE_CREATE, MPI_INFO_NULL, &file);
  MPI_File_set_size(file, 0);

  if (GlobalRank() == 0) {
    std::stringstream xml_header;
    pugi::xml_document doc;
    pugi::xml_node node_data = doc.append_child("Global").append_child("NumProcesses").append_child(pugi::node_pcdata);
    node_data.set_value(Helper::ToString(GlobalProcs().Product()).c_str());
    doc.save(xml_header);

    char* xml_header_array = Helper::GetCharArray(xml_header.str());

    MPI_File_write_shared(file, xml_header_array, xml_header.str().size(), MPI_CHAR, MPI_STATUS_IGNORE);

    delete [] xml_header_array;
  }

  MPI_File_write_ordered(file, str_array, xml_data.size(), MPI_CHAR, MPI_STATUS_IGNORE);
  MPI_File_close(&file);

  delete [] filename_array;
  delete [] str_array;
}

void CommMPI::PrintGridInformation(const Grid& grid, char* filename, const std::string& name)
{
  std::stringstream buf;
  MPI_File file;
  int rank, size;
  int size_local, size_local_max;

  MPI_Comm comm = settings.CommunicatorGlobal(grid);
  MPI_Comm comm_local = settings.CommunicatorLocal(grid);

  if (comm_local != MPI_COMM_NULL)
    MPI_Comm_size(comm_local, &size_local);
  else
    size_local = 0;

  if (comm != MPI_COMM_NULL) {

    MPI_Reduce(&size_local, &size_local_max, 1, MPI_INT, MPI_MAX, 0, comm);

    MPI_File_open(comm, filename, MPI_MODE_WRONLY|MPI_MODE_CREATE|MPI_MODE_APPEND, MPI_INFO_NULL, &file);

    MPI_Comm_rank(comm, &rank);
    MPI_Comm_size(comm, &size);

    if (rank == 0) {
      buf << "###########################################################" << std::endl
          << "GLOBAL INFORMATION:" << std::endl
          << " NAME: " << name << std::endl
          << " LEVEL: " << grid.Level() << std::endl
          << " COMM_SIZE_GLOBAL: " << size << std::endl
          << " COMM_SIZE_LOCAL: " << size_local_max << std::endl
          << " GLOBAL:" << std::endl
          << "  GLOBAL_BEGIN: " << grid.Global().GlobalBegin() << std::endl
          << "  GLOBAL_END: " << grid.Global().GlobalEnd() << std::endl
          << "  GLOBAL_SIZE: " << grid.Global().GlobalSize() << std::endl
          << "  GLOBAL_BEGIN_FINEST: " << grid.Global().GlobalBeginFinest() << std::endl
          << "  GLOBAL_END_FINEST: " << grid.Global().GlobalEndFinest() << std::endl
          << "  GLOBAL_SIZE_FINEST: " << grid.Global().GlobalSizeFinest() << std::endl
          << " EXTENT:" << std::endl
          << "  BEGIN: " << grid.Extent().Begin() << std::endl
          << "  END: " << grid.Extent().End() << std::endl
          << "  SIZE: " << grid.Extent().Size() << std::endl
          << "  MESH_WIDTH: " << grid.Extent().MeshWidth() << std::endl
          << std::endl
          << "LOCAL INFORMATION:" << std::endl;
    }

    buf << "RANK " << rank << ":" << std::endl
        << " GLOBAL:" << std::endl
        << "  LOCAL_BEGIN: " << grid.Global().LocalBegin() << std::endl
        << "  LOCAL_END: " << grid.Global().LocalEnd() << std::endl
        << "  LOCAL_SIZE: " << grid.Global().LocalSize() << std::endl
        << "  BOUNDARY_TYPE: " << grid.Global().BoundaryType() << std::endl
        << " LOCAL:" << std::endl
        << "  BEGIN: " << grid.Local().Begin() << std::endl
        << "  END: " << grid.Local().End() << std::endl
        << "  SIZE: " << grid.Local().Size() << std::endl
        << "  SIZE_TOTAL: " << grid.Local().SizeTotal() << std::endl
        << "  HALO_BEGIN_1: " << grid.Local().HaloBegin1() << std::endl
        << "  HALO_END_1: " << grid.Local().HaloEnd1() << std::endl
        << "  HALO_SIZE_1: " << grid.Local().HaloSize1() << std::endl
        << "  HALO_BEGIN_2: " << grid.Local().HaloBegin2() << std::endl
        << "  HALO_END_2: " << grid.Local().HaloEnd2() << std::endl
        << "  HALO_SIZE_2: " << grid.Local().HaloSize2() << std::endl
        << "  BOUNDARY_BEGIN_1: " << grid.Local().BoundaryBegin1() << std::endl
        << "  BOUNDARY_END_1: " << grid.Local().BoundaryEnd1() << std::endl
        << "  BOUNDARY_SIZE_1: " << grid.Local().BoundarySize1() << std::endl
        << "  BOUNDARY_BEGIN_2: " << grid.Local().BoundaryBegin2() << std::endl
        << "  BOUNDARY_END_2: " << grid.Local().BoundaryEnd2() << std::endl
        << "  BOUNDARY_SIZE_2: " << grid.Local().BoundarySize2() << std::endl;

    if (rank == size-1)
      buf << "###########################################################" << std::endl;

    char* char_buf = Helper::GetCharArray(buf.str());
    MPI_File_write_ordered(file, char_buf, buf.str().size(), MPI_CHAR, MPI_STATUS_IGNORE);
    delete [] char_buf;

    MPI_File_close(&file);
  }
}

void CommMPI::PrintDatatypes(char* filename)
{
  std::stringstream buf;
  MPI_File file;

  if (comm_global != MPI_COMM_NULL) {

    MPI_File_open(comm_global, filename, MPI_MODE_WRONLY|MPI_MODE_CREATE|MPI_MODE_APPEND, MPI_INFO_NULL, &file);

    buf << settings << std::endl;

    char* char_buf = Helper::GetCharArray(buf.str());
    MPI_File_write_ordered(file, char_buf, buf.str().size(), MPI_CHAR, MPI_STATUS_IGNORE);
    delete [] char_buf;

    MPI_File_close(&file);
  }
}

void CommMPI::PrintAllSettings()
{
  std::stringstream buf;
  MPI_File file;

  const Multigrid& mg = *MG::GetSol();

  buf << OutputPath() << "settings.txt";
  char* filename = Helper::GetCharArray(buf.str());

  MPI_File_open(comm_global, filename, MPI_MODE_WRONLY|MPI_MODE_CREATE, MPI_INFO_NULL, &file);
  MPI_File_set_size(file, 0);
  MPI_File_close(&file);

  for (int i=mg.MinLevel(); i<=mg.MaxLevel(); ++i)
    PrintGridInformation(mg(i), filename, "MULTIGRID");

  for (int i=mg.MinLevel()+1; i<=mg.MaxLevel(); ++i)
    PrintGridInformation(settings.CoarserGrid(mg(i)), filename, "COARSER_GRID");

  for (int i=mg.MinLevel(); i<mg.MaxLevel(); ++i)
    PrintGridInformation(settings.FinerGrid(mg(i)), filename, "FINER_GRID");

  PrintDatatypes(filename);

  delete [] filename;
}

void CommMPI::PrintGrid(Grid& grid, const char* information)
{
  int output_count = OutputCount();

#ifdef HAVE_VTK

  if (settings.CommunicatorLocal(grid) != MPI_COMM_NULL) {

    vtkSmartPointer<vtkImageData> image = vtkSmartPointer<vtkImageData>::New();
    image->SetExtent(grid.Global().LocalBegin().X(), grid.Global().LocalEnd().X()-1,
                     grid.Global().LocalBegin().Y(), grid.Global().LocalEnd().Y()-1,
                     grid.Global().LocalBegin().Z(), grid.Global().LocalEnd().Z()-1);

    image->SetSpacing(grid.Extent().MeshWidth().vec());
    image->SetOrigin(grid.Extent().Begin().vec());
    image->SetScalarTypeToDouble();
    image->SetNumberOfScalarComponents(1);
    image->AllocateScalars();
    image->GetPointData()->GetScalars()->SetName(information);

    Index i;
    Index begin, end, offset;

    for (int j=0; j<3; ++j) {
      if (grid.Local().BoundarySize1()[j] > 0) {
        begin[j] = grid.Local().BoundaryBegin1()[j];
        offset[j] = grid.Global().LocalBegin()[j];
      } else {
        begin[j] = grid.Local().Begin()[j];
        offset[j] = grid.Global().LocalBegin()[j] - grid.Local().Begin()[j];
      }
      end[j] = (grid.Local().BoundarySize2()[j] > 0 ? grid.Local().BoundaryEnd2()[j] : grid.Local().End()[j]);
    }

    for (i.X()=begin.X(); i.X()<end.X(); ++i.X())
      for (i.Y()=begin.Y(); i.Y()<end.Y(); ++i.Y())
        for (i.Z()=begin.Z(); i.Z()<end.Z(); ++i.Z())
          image->SetScalarComponentFromDouble(i.X() + offset.X(),
                                              i.Y() + offset.Y(),
                                              i.Z() + offset.Z(),
                                              0, grid.GetVal(i));

    image->Update();

    int rank, size;
    MPI_Comm_rank(comm_global, &rank);
    MPI_Comm_size(comm_global, &size);

    std::stringstream filename;
    filename << OutputPath() << std::setw(4) << std::setfill('0') << output_count << "_" << rank << ".vti";

    vtkSmartPointer<vtkXMLImageDataWriter> writer = vtkSmartPointer<vtkXMLImageDataWriter>::New();
    writer->SetFileName(filename.str().c_str());
    writer->SetCompressorTypeToNone();
    writer->SetDataModeToAscii();
    writer->SetInput(image);
    writer->Update();
    writer->Write();
  }

//FIXME
#else /* HAVE_VTK */
  Index i;
  std::stringstream buf;

  Index begin, end;
  Index begin_local, end_local, begin_global, end_global;

  CommToGhosts(grid);

  for (int i=0; i<3; ++i) {
    end[i] = grid.Local().End()[i] + (grid.Global().LocalEnd()[i] == grid.Global().GlobalSize()[i] ? 0 : grid.Local().HaloSize1()[i]);
    end_local[i] = grid.Global().LocalEnd()[i] - (grid.Global().LocalEnd()[i] == grid.Global().GlobalSize()[i] ? 1 : 0);
  }

  begin = grid.Local().Begin();
  begin_local = grid.Global().LocalBegin();
  begin_global = 0;
  end_global = grid.Global().GlobalSize()-1;

  for (i.Z()=begin.Z(); i.Z()<end.Z(); ++i.Z())
    for (i.Y()=begin.Y(); i.Y()<end.Y(); ++i.Y())
      for (i.X()=begin.X(); i.X()<end.X(); ++i.X())
        buf << std::scientific << grid.GetVal(i) << " ";

  CreateOutputFiles(grid, buf, information,
                    begin_global, end_global,
                    begin_local, end_local,
                    output_count);
#endif /* HAVE_VTK */
}

void CommMPI::PrintDefect(Grid& sol, Grid& rhs, const char* information)
{
  TempGrid* temp = MG::GetTempGrid();
  temp->SetProperties(sol);
  temp->ImportFromResidual(sol, rhs);
  PrintGrid(*temp, information);
}

void CommMPI::CreateOutputFiles(const Grid& grid, const std::stringstream& serial_data, const char* information,
                                const Index& begin_global, const Index& end_global,
                                const Index& begin_local, const Index& end_local,
                                const int& output_count)
{
  MPI_Comm comm = settings.CommunicatorGlobal(grid);

  if (comm != MPI_COMM_NULL) {

    MPI_File file;
    std::string conv_information = Helper::ReplaceWhitespaces(information, "_");

    CreateParallelOutputFile(grid, comm, output_count, conv_information.c_str(),
                             begin_global, end_global, begin_local, end_local);

    file = CreateSerialOutputFile(grid, comm, output_count, conv_information.c_str(),
                                  begin_global, end_global, begin_local, end_local);

    char* char_buf = Helper::GetCharArray(serial_data.str());
    MPI_File_write(file, char_buf, serial_data.str().size(), MPI_CHAR, MPI_STATUS_IGNORE);
    delete [] char_buf;

    FinalizeSerialOutputFile(file);
  }
}

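/*
 * VTK-style parallel output: each rank writes its piece of the grid as a
 * serial .vti file, and a single .pvti master file records the global extent
 * plus one <Piece> entry per rank, so that tools such as ParaView can
 * reassemble the full image data.
 */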
void CommMPI::CreateParallelOutputFile(const Grid& grid, MPI_Comm& comm,
                                       const int& output_count, const char* information,
                                       const Index& begin_global, const Index& end_global,
                                       const Index& begin_local, const Index& end_local)
{
  int rank;
  MPI_File file;
  char parallel_filename[513], serial_filename[513];
  std::stringstream buf;

  MPI_Comm_rank(comm, &rank);

  // snprintf bounds the writes to the fixed-size filename buffers.
  snprintf(parallel_filename, sizeof(parallel_filename), "%s%04d.pvti", OutputPath().c_str(), output_count);
  snprintf(serial_filename, sizeof(serial_filename), "%04d_%d.vti", output_count, rank);

  MPI_File_open(comm, parallel_filename, MPI_MODE_WRONLY|MPI_MODE_CREATE, MPI_INFO_NULL, &file);
  MPI_File_set_size(file, 0);

  if (rank == 0) {

    buf << "<?xml version=\"1.0\"?>" << std::endl
        << "<VTKFile type=\"PImageData\" version=\"0.1\" byte_order=\"LittleEndian\">" << std::endl
        << "  <PImageData WholeExtent=\"";

    for (int i=0; i<3; ++i)
      buf << begin_global[i] << " " << end_global[i] << " ";

    buf << "\"" << std::endl
        << "    GhostLevel=\"0\" Origin=\"0 0 0\" Spacing=\"";

    for (int i=0; i<3; ++i)
      buf << grid.Extent().MeshWidth()[i] << " ";

    buf << "\">" << std::endl
        << "    <PPointData Scalars=\"" << information << "\">" << std::endl
        << "      <PDataArray type=\"Float32\" Name=\"" << information << "\"/>" << std::endl
        << "    </PPointData>" << std::endl;

    char* char_buf = Helper::GetCharArray(buf.str());

    MPI_File_write_shared(file, char_buf, buf.str().size(), MPI_CHAR, MPI_STATUS_IGNORE);

    delete [] char_buf;
  }

  buf.str("");

  if ((end_local-begin_local).Product() > 0) {
    buf << "    <Piece Extent=\"";

    for (int i=0; i<3; ++i)
      buf << begin_local[i] << " " << end_local[i] << " ";

    buf << "\" Source=\"" << serial_filename << "\"/>" << std::endl;
  }

  char* char_buf = Helper::GetCharArray(buf.str());

  MPI_File_write_ordered(file, char_buf, buf.str().size(), MPI_CHAR, MPI_STATUS_IGNORE);

  delete [] char_buf;

  if (rank == 0) {

    buf.str("");

    buf << "  </PImageData>" << std::endl
        << "</VTKFile>" << std::endl;

    char* char_buf = Helper::GetCharArray(buf.str());

    MPI_File_write_shared(file, char_buf, buf.str().size(), MPI_CHAR, MPI_STATUS_IGNORE);

    delete [] char_buf;
  }

  MPI_File_close(&file);
}

MPI_File CommMPI::CreateSerialOutputFile(const Grid& grid, MPI_Comm& comm,
                                         const int& output_count, const char* information,
                                         const Index& begin_global, const Index& end_global,
                                         const Index& begin_local, const Index& end_local)
{
  char serial_filename[513];
  int rank;
  MPI_File file;
  std::stringstream buf;

  MPI_Comm_rank(comm_global, &rank);

  snprintf(serial_filename, sizeof(serial_filename), "%s%04d_%d.vti", OutputPath().c_str(), output_count, rank);

  MPI_File_open(MPI_COMM_SELF, serial_filename, MPI_MODE_WRONLY|MPI_MODE_CREATE, MPI_INFO_NULL, &file);

  buf << "<?xml version=\"1.0\"?>" << std::endl
      << "<VTKFile type=\"ImageData\" version=\"0.1\" byte_order=\"LittleEndian\">" << std::endl
      << "  <ImageData WholeExtent=\"";

  for (int i=0; i<3; ++i)
    buf << begin_global[i] << " " << end_global[i] << " ";

  buf << "\"" << std::endl
      << "    Origin=\"0 0 0\" Spacing=\"";

  for (int i=0; i<3; ++i)
    buf << grid.Extent().MeshWidth()[i] << " ";

  buf << "\">" << std::endl
      << "    <Piece Extent=\"";

  for (int i=0; i<3; ++i)
    buf << begin_local[i] << " " << end_local[i] << " ";

  buf << "\">" << std::endl
      << "      <PointData Scalars=\"" << information << "\">" << std::endl
      << "        <DataArray type=\"Float32\" Name=\"" << information << "\" format=\"ascii\">" << std::endl
      << "          ";

  char* char_buf = Helper::GetCharArray(buf.str());
  MPI_File_write(file, char_buf, buf.str().size(), MPI_CHAR, MPI_STATUS_IGNORE);
  delete [] char_buf;

  return file;
}

|
|---|
| 842 | {
|
|---|
| 843 | std::stringstream buf;
|
|---|
| 844 |
|
|---|
| 845 | buf << std::endl
|
|---|
| 846 | << " </DataArray>" << std::endl
|
|---|
| 847 | << " </PointData>" << std::endl
|
|---|
| 848 | << " </Piece>" << std::endl
|
|---|
| 849 | << " </ImageData>" << std::endl
|
|---|
| 850 | << "</VTKFile>" << std::endl;
|
|---|
| 851 |
|
|---|
| 852 | char* char_buf = Helper::GetCharArray(buf.str());
|
|---|
| 853 | MPI_File_write(file, char_buf, buf.str().size(), MPI_CHAR, MPI_STATUS_IGNORE);
|
|---|
| 854 | delete [] char_buf;
|
|---|
| 855 |
|
|---|
| 856 | MPI_File_close(&file);
|
|---|
| 857 | }
|
|---|
| 858 |
|
|---|
int CommMPI::GlobalRank() const
{
  int rank;
  MPI_Comm_rank(comm_global, &rank);
  return rank;
}

int CommMPI::GlobalSize() const
{
  int size;
  MPI_Comm_size(comm_global, &size);
  return size;
}

Index CommMPI::GlobalPos() const
{
  Index dims, periods, coords;
  MPI_Cart_get(comm_global, 3, dims.vec(), periods.vec(), coords.vec());
  return coords;
}

Index CommMPI::GlobalProcs() const
{
  Index dims, periods, coords;
  MPI_Cart_get(comm_global, 3, dims.vec(), periods.vec(), coords.vec());
  return dims;
}

int CommMPI::Rank(const Grid& grid) const
{
  int rank;
  MPI_Comm comm = settings.CommunicatorLocal(grid);
  assert(comm != MPI_COMM_NULL);
  MPI_Comm_rank(comm, &rank);
  return rank;
}

int CommMPI::Size(const Grid& grid) const
{
  int size;
  MPI_Comm comm = settings.CommunicatorLocal(grid);
  assert(comm != MPI_COMM_NULL);
  MPI_Comm_size(comm, &size);
  return size;
}

Index CommMPI::Pos(const Grid& grid) const
{
  Index dims, periods, coords;
  MPI_Comm comm = settings.CommunicatorLocal(grid);
  assert(comm != MPI_COMM_NULL);
  MPI_Cart_get(comm, 3, dims.vec(), periods.vec(), coords.vec());
  return coords;
}

Index CommMPI::Procs(const Grid& grid) const
{
  Index dims, periods, coords;
  MPI_Comm comm = settings.CommunicatorLocal(grid);
  assert(comm != MPI_COMM_NULL);
  MPI_Cart_get(comm, 3, dims.vec(), periods.vec(), coords.vec());
  return dims;
}

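/*
 * Build (or adopt) the 3D Cartesian process topology. If the incoming
 * communicator already carries a Cartesian topology it is used as-is;
 * otherwise the rank count is factored into dims[]: for a power of two the
 * exponent is spread as evenly as possible over the three axes, anything
 * else is left to MPI_Dims_create.
 */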
void CommMPI::InitCommMPI(const MPI_Comm& comm)
{
  int status, size, rank;
  int dims[3] = {0, 0, 0};
  int periods[3];

  for (int i=0; i<3; ++i)
    periods[i] = (BoundaryConditions()[i] == Periodic ? 1 : 0);

  MPI_Comm_size(comm, &size);
  MPI_Comm_rank(comm, &rank);

  MPI_Topo_test(comm, &status);

  if (status == MPI_CART) {

    comm_global = comm;

  } else {

    const int log2 = Helper::log_2(size);

    if (Helper::intpow(2, log2) == size) {
      for (int i=0; i<3; ++i)
        dims[i] = Helper::intpow(2, log2 / 3 + (log2 % 3 > i ? 1 : 0));
    } else {
      MPI_Dims_create(size, 3, dims);
    }

#ifdef OUTPUT_DEBUG
    if (rank == 0)
      std::printf("Process grid: %d %d %d\n", dims[0], dims[1], dims[2]);
#endif

    MPI_Cart_create(comm, 3, dims, periods, 1, &comm_global);
  }

  MPI_Info_create(&info);
  char key[] = "no_locks";
  char val[] = "true";
  MPI_Info_set(info, key, val);
}

CommMPI::~CommMPI()
{
  MPI_Comm_free(&comm_global);
#ifdef VMG_ONE_SIDED
  if (win_created)
    MPI_Win_free(&win);
#endif
  MPI_Info_free(&info);
}

Grid& CommMPI::GetCoarserGrid(Multigrid& multigrid)
{
  return settings.CoarserGrid(multigrid(multigrid.Level()));
}

Grid& CommMPI::GetFinerGrid(Multigrid& multigrid)
{
  return settings.FinerGrid(multigrid(multigrid.Level()));
}

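/*
 * Rank 0 creates a unique, timestamped directory below ./output/ and then
 * broadcasts the resulting path (length first, then the characters) so that
 * all ranks agree on OutputPath(). Without Boost.Filesystem the current
 * working directory is used instead.
 */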
std::string CommMPI::CreateOutputDirectory()
{
#ifdef HAVE_BOOST_FILESYSTEM
  std::string path, unique_path;
  std::stringstream unique_suffix;
  int suffix_counter = 0;
  char buffer[129];
  time_t rawtime;
  struct tm* timeinfo;
  int path_size;
  char* path_array;

  if (GlobalRank() == 0) {

    time(&rawtime);
    timeinfo = localtime(&rawtime);
    strftime(buffer, 128, "./output/%Y_%m_%d_%H_%M_%S/", timeinfo);
    path = buffer;

    if (!fs::exists("output"))
      fs::create_directory("output");

    unique_path = path;

    while (fs::exists(unique_path.c_str())) {

      unique_suffix.str("");
      unique_suffix << "_" << suffix_counter++ << "/";

      unique_path = path;
      unique_path.replace(unique_path.size()-1, 1, unique_suffix.str());
    }

    fs::create_directory(unique_path.c_str());

    path_size = unique_path.size() + 1;
    path_array = Helper::GetCharArray(unique_path);

    MPI_Bcast(&path_size, 1, MPI_INT, 0, comm_global);

  } else {

    MPI_Bcast(&path_size, 1, MPI_INT, 0, comm_global);
    path_array = new char[path_size];
  }

  MPI_Bcast(path_array, path_size, MPI_CHAR, 0, comm_global);

  unique_path = path_array;

  delete [] path_array;

  return unique_path;

#else

  return "./";

#endif
}

#endif /* HAVE_MPI */