/*
 * vmg - a versatile multigrid solver
 * Copyright (C) 2012 Institute for Numerical Simulation, University of Bonn
 *
 * vmg is free software: you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation, either version 3 of the License, or
 * (at your option) any later version.
 *
 * vmg is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program. If not, see <http://www.gnu.org/licenses/>.
 */

/**
 * @file comm_mpi.cpp
 * @author Julian Iseringhausen <isering@ins.uni-bonn.de>
 * @date Wed Jun 16 13:21:06 2010
 *
 * @brief Class for MPI-based communication.
 *
 */

#ifdef HAVE_CONFIG_H
#include <libvmg_config.h>
#endif

#ifdef HAVE_MPI

#include <mpi.h>
#ifdef HAVE_MARMOT
#include <enhancempicalls.h>
#include <sourceinfompicalls.h>
#endif

#ifdef HAVE_BOOST_FILESYSTEM_PATH_HPP
#include <boost/filesystem.hpp>
namespace fs = boost::filesystem;
#endif

#ifdef HAVE_VTK
#include <vtkAbstractArray.h>
#include <vtkImageData.h>
#include <vtkPointData.h>
#include <vtkSmartPointer.h>
#include <vtkXMLImageDataWriter.h>
#endif

#include <cassert>
#include <cstdarg>
#include <cstdio>
#include <cstring>
#include <ctime>
#include <iomanip>
#include <sstream>

| 56 | #include "base/helper.hpp"
|
---|
| 57 | #include "base/tuple.hpp"
|
---|
| 58 | #include "comm/comm_mpi.hpp"
|
---|
| 59 | #include "comm/mpi/datatypes_local.hpp"
|
---|
| 60 | #include "discretization/boundary_value_setter.hpp"
|
---|
| 61 | #include "grid/grid.hpp"
|
---|
| 62 | #include "grid/multigrid.hpp"
|
---|
| 63 | #include "grid/tempgrid.hpp"
|
---|
| 64 | #include "mg.hpp"
|
---|
| 65 | #include "base/timer.hpp"
|
---|
| 66 |
|
---|
| 67 | static char print_buffer[512];
|
---|
| 68 |
|
---|
| 69 | using namespace VMG;
|
---|
| 70 |
|
---|
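// Helpers that post one non-blocking MPI operation per datatype. Each call
// obtains its request slot from Request(); a later WaitAll() (or TestAll()
// for asynchronous progress) completes every pending transfer. The
// *Buffered variants pack into / unpack from intermediate buffers, and
// ReplaceBufferAll / AddBufferAll copy or accumulate the received buffers
// back into the grid.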
void CommMPI::IsendAll(Grid& grid, std::vector<VMG::MPI::Datatype>& types, const MPI_Comm& comm, const int& tag_start)
{
  for (std::vector<VMG::MPI::Datatype>::const_iterator iter=types.begin(); iter!=types.end(); ++iter)
    iter->Isend(grid, tag_start, comm, Request());
}

void CommMPI::IrecvAll(Grid& grid, std::vector<VMG::MPI::Datatype>& types, const MPI_Comm& comm, const int& tag_start)
{
  for (std::vector<VMG::MPI::Datatype>::const_iterator iter=types.begin(); iter!=types.end(); ++iter)
    iter->Irecv(grid, tag_start, comm, Request());
}

void CommMPI::IsendAllBuffered(const Grid& grid, std::vector<VMG::MPI::Datatype>& types, const MPI_Comm& comm, const int& tag_start)
{
  for (std::vector<VMG::MPI::Datatype>::iterator iter=types.begin(); iter!=types.end(); ++iter)
    iter->IsendBuffered(grid, tag_start, comm, Request());
}

void CommMPI::IrecvAllBuffered(std::vector<VMG::MPI::Datatype>& types, const MPI_Comm& comm, const int& tag_start)
{
  for (std::vector<VMG::MPI::Datatype>::iterator iter=types.begin(); iter!=types.end(); ++iter)
    iter->IrecvBuffered(tag_start, comm, Request());
}

void CommMPI::ReplaceBufferAll(Grid& grid, const std::vector<VMG::MPI::Datatype>& types)
{
  for (std::vector<VMG::MPI::Datatype>::const_iterator iter=types.begin(); iter!=types.end(); ++iter)
    iter->GridReplace(grid);
}

void CommMPI::AddBufferAll(Grid& grid, const std::vector<VMG::MPI::Datatype>& types)
{
  for (std::vector<VMG::MPI::Datatype>::const_iterator iter=types.begin(); iter!=types.end(); ++iter)
    iter->GridSum(grid);
}

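// Inter-grid exchange (e.g. between levels or different distributions of
// the same level): receives are posted before the matching sends, WaitAll()
// completes both directions, and the received buffers are then copied
// (CommSubgrid) or accumulated (CommAddSubgrid) into the target grid. The
// distinct tag constants keep concurrent exchanges from interfering.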
void CommMPI::CommSubgrid(Grid& grid_old, Grid& grid_new, const int& direction)
{
  MPI_Comm comm = settings.CommunicatorGlobal(grid_old);
  if (comm != MPI_COMM_NULL) {
    VMG::MPI::DatatypesGlobal& datatypes = settings.DatatypesGlobal(grid_old, grid_new, direction);
    IrecvAllBuffered(datatypes.Receive(), comm, 0411);
    IsendAllBuffered(grid_old, datatypes.Send(), comm, 0411);
    WaitAll();
    ReplaceBufferAll(grid_new, datatypes.Receive());
  }
}

void CommMPI::CommAddSubgrid(Grid& grid_old, Grid& grid_new, const int& direction)
{
  MPI_Comm comm = settings.CommunicatorGlobal(grid_old);
  if (comm != MPI_COMM_NULL) {
    VMG::MPI::DatatypesGlobal& datatypes = settings.DatatypesGlobal(grid_old, grid_new, direction);
    IrecvAllBuffered(datatypes.Receive(), comm, 1806);
    IsendAllBuffered(grid_old, datatypes.Send(), comm, 1806);
    WaitAll();
    AddBufferAll(grid_new, datatypes.Receive());
  }
}

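// Sums boundary-value contributions over the processes of the finest level:
// each rank appends its partial values grouped by owning rank, an in-place
// MPI_Reduce_scatter sums the groups and leaves every rank exactly its own
// block, which is finally written back into this rank's boundary values.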
std::vector<BoundaryValue> CommMPI::CommBoundaryValues()
{
  MPI_Comm comm = settings.CommunicatorLocal((*MG::GetRhs())(MG::GetRhs()->MaxLevel()));
  if (comm != MPI_COMM_NULL) {

    const std::vector<BoundaryValue>& bvs = MG::GetBoundaryValueSetter()->BoundaryValues();

    std::vector<vmg_float> val_buffer; val_buffer.reserve(bvs.size());

    int comm_size, comm_rank;
    MPI_Comm_rank(comm, &comm_rank);
    MPI_Comm_size(comm, &comm_size);

    std::vector< std::vector<BoundaryValue> > bvs_distributed(comm_size);

    for (unsigned int i=0; i<bvs.size(); ++i)
      bvs_distributed[settings.BoundaryRanks()[i]].push_back(bvs[i]);

    // A variable-length array is not standard C++; use a std::vector instead.
    std::vector<int> bvs_count(comm_size);
    for (int i=0; i<comm_size; ++i) {
      bvs_count[i] = bvs_distributed[i].size();
      for (unsigned int j=0; j<bvs_distributed[i].size(); ++j)
        val_buffer.push_back(bvs_distributed[i][j].Val());
    }

    MPI_Reduce_scatter(MPI_IN_PLACE, &val_buffer.front(), &bvs_count.front(), MPI_DOUBLE, MPI_SUM, comm);

    int begin = 0;
    for (int i=0; i<comm_rank; ++i)
      begin += bvs_distributed[i].size();

    for (unsigned int i=0; i<bvs_distributed[comm_rank].size(); ++i)
      bvs_distributed[comm_rank][i].Val() = val_buffer[begin+i];

    return bvs_distributed[comm_rank];
  }

  return std::vector<BoundaryValue>();
}

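// Halo exchange: CommToGhosts fills this rank's ghost (halo) layer with the
// neighbors' boundary data; CommFromGhosts (further below) sends the ghost
// layer back and accumulates it into the neighbors' interior points.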
void CommMPI::CommToGhosts(Grid& grid)
{
  MPI_Comm comm = settings.CommunicatorLocal(grid);
  if (comm != MPI_COMM_NULL) {
    VMG::MPI::DatatypesLocal& types = settings.DatatypesLocal(grid);
    IrecvAllBuffered(types.Halo(), comm, 2310);
    IsendAllBuffered(grid, types.NB(), comm, 2310);
    WaitAll();
    ReplaceBufferAll(grid, types.Halo());
  }
}

void CommMPI::CommToGhostsAsyncStart(Grid& grid)
{
  MPI_Comm comm = settings.CommunicatorLocal(grid);
  if (comm != MPI_COMM_NULL) {
    VMG::MPI::DatatypesLocal& types = settings.DatatypesLocal(grid);
    IrecvAllBuffered(types.Halo(), comm, 2412);
    IsendAllBuffered(grid, types.NB(), comm, 2412);
    TestAll();
  }
}

void CommMPI::CommToGhostsAsyncFinish(Grid& grid)
{
  WaitAll();
  ReplaceBufferAll(grid, settings.DatatypesLocal(grid).Halo());
}

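// The Async pair splits the halo exchange so that communication can overlap
// with computation. A minimal usage sketch (assuming a CommMPI instance
// `comm` and a Grid `grid`; the work in between should not touch the ghost
// layer, since it is only valid after the finish call):
//
//   comm.CommToGhostsAsyncStart(grid);   // post receives and sends
//   // ... compute on the interior of `grid` ...
//   comm.CommToGhostsAsyncFinish(grid);  // wait, then unpack the halo
//
// CommFromGhostsAsyncStart/Finish below work analogously in the opposite
// direction.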
void CommMPI::CommFromGhosts(Grid& grid)
{
  MPI_Comm comm = settings.CommunicatorLocal(grid);
  if (comm != MPI_COMM_NULL) {
    VMG::MPI::DatatypesLocal& types = settings.DatatypesLocal(grid);
    IrecvAllBuffered(types.NB(), comm, 1337);
    IsendAllBuffered(grid, types.Halo(), comm, 1337);
    WaitAll();
    AddBufferAll(grid, types.NB());
  }
}

void CommMPI::CommFromGhostsAsyncStart(Grid& grid)
{
  MPI_Comm comm = settings.CommunicatorLocal(grid);
  if (comm != MPI_COMM_NULL) {
    VMG::MPI::DatatypesLocal& types = settings.DatatypesLocal(grid);
    IrecvAllBuffered(types.NB(), comm, 0xc0ffee);
    IsendAllBuffered(grid, types.Halo(), comm, 0xc0ffee);
    TestAll();
  }
}

void CommMPI::CommFromGhostsAsyncFinish(Grid& grid)
{
  WaitAll();
  AddBufferAll(grid, settings.DatatypesLocal(grid).NB());
}

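// Reductions and collectives over the global Cartesian communicator. The
// *Root variants deliver their result on rank 0 only; the plain variants
// return it on every rank.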
void CommMPI::Barrier()
{
  MPI_Barrier(comm_global);
}

vmg_float CommMPI::GlobalSum(vmg_float value)
{
  vmg_float result = 0;
  MPI_Allreduce(&value, &result, 1, MPI_DOUBLE, MPI_SUM, comm_global);
  return result;
}

vmg_float CommMPI::GlobalSumRoot(vmg_float value)
{
  vmg_float result = 0;
  MPI_Reduce(&value, &result, 1, MPI_DOUBLE, MPI_SUM, 0, comm_global);
  return result;
}

void CommMPI::GlobalSumArray(vmg_float* array, const vmg_int& size)
{
  MPI_Allreduce(MPI_IN_PLACE, array, size, MPI_DOUBLE, MPI_SUM, comm_global);
}

void CommMPI::GlobalBroadcast(vmg_float& value)
{
  MPI_Bcast(&value, 1, MPI_DOUBLE, 0, comm_global);
}

void CommMPI::GlobalGather(vmg_float& value, vmg_float* array)
{
  MPI_Gather(&value, 1, MPI_DOUBLE, array, 1, MPI_DOUBLE, 0, comm_global);
}

vmg_int CommMPI::GlobalSum(vmg_int value)
{
  vmg_int result = 0;
  MPI_Allreduce(&value, &result, 1, MPI_INT, MPI_SUM, comm_global);
  return result;
}

vmg_int CommMPI::GlobalSumRoot(vmg_int value)
{
  vmg_int result = 0;
  MPI_Reduce(&value, &result, 1, MPI_INT, MPI_SUM, 0, comm_global);
  return result;
}

void CommMPI::GlobalSumArray(vmg_int* array, const vmg_int& size)
{
  MPI_Allreduce(MPI_IN_PLACE, array, size, MPI_INT, MPI_SUM, comm_global);
}

vmg_float CommMPI::GlobalMax(vmg_float value)
{
  vmg_float result = 0.0;
  MPI_Allreduce(&value, &result, 1, MPI_DOUBLE, MPI_MAX, comm_global);
  return result;
}

vmg_float CommMPI::GlobalMaxRoot(vmg_float value)
{
  vmg_float result = 0.0;
  MPI_Reduce(&value, &result, 1, MPI_DOUBLE, MPI_MAX, 0, comm_global);
  return result;
}

void CommMPI::GlobalMaxArray(vmg_float* array, const vmg_int& size)
{
  MPI_Allreduce(MPI_IN_PLACE, array, size, MPI_DOUBLE, MPI_MAX, comm_global);
}

vmg_int CommMPI::GlobalMax(vmg_int value)
{
  vmg_int result = 0;
  MPI_Allreduce(&value, &result, 1, MPI_INT, MPI_MAX, comm_global);
  return result;
}

vmg_int CommMPI::GlobalMaxRoot(vmg_int value)
{
  vmg_int result = 0;
  MPI_Reduce(&value, &result, 1, MPI_INT, MPI_MAX, 0, comm_global);
  return result;
}

void CommMPI::GlobalMaxArray(vmg_int* array, const vmg_int& size)
{
  MPI_Allreduce(MPI_IN_PLACE, array, size, MPI_INT, MPI_MAX, comm_global);
}

void CommMPI::GlobalBroadcast(vmg_int& value)
{
  MPI_Bcast(&value, 1, MPI_INT, 0, comm_global);
}

void CommMPI::GlobalGather(vmg_int& value, vmg_int* array)
{
  MPI_Gather(&value, 1, MPI_INT, array, 1, MPI_INT, 0, comm_global);
}

void CommMPI::GlobalBroadcast(char* str)
{
  int size = std::strlen(str) + 1;
  MPI_Bcast(&size, 1, MPI_INT, 0, comm_global);
  MPI_Bcast(str, size, MPI_CHAR, 0, comm_global);
}

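// Level-local reductions: these operate on the communicator attached to the
// given grid's level, so only the processes owning a part of that level
// participate. The assertions guard against calls from ranks outside the
// level communicator.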
vmg_float CommMPI::LevelSum(const Grid& grid, vmg_float value)
{
  vmg_float result = 0.0;
  MPI_Comm comm = settings.CommunicatorLocal(grid);
  assert(comm != MPI_COMM_NULL);
  MPI_Allreduce(&value, &result, 1, MPI_DOUBLE, MPI_SUM, comm);
  return result;
}

vmg_float CommMPI::LevelSumRoot(const Grid& grid, vmg_float value)
{
  vmg_float result = 0.0;
  MPI_Comm comm = settings.CommunicatorLocal(grid);
  assert(comm != MPI_COMM_NULL);
  MPI_Reduce(&value, &result, 1, MPI_DOUBLE, MPI_SUM, 0, comm);
  return result;
}

void CommMPI::LevelSumArray(const Grid& grid, vmg_float* array, const vmg_int& size)
{
  MPI_Comm comm = settings.CommunicatorLocal(grid);
  assert(comm != MPI_COMM_NULL);
  MPI_Allreduce(MPI_IN_PLACE, array, size, MPI_DOUBLE, MPI_SUM, comm);
}

vmg_int CommMPI::LevelSum(const Grid& grid, vmg_int value)
{
  vmg_int result = 0;
  MPI_Comm comm = settings.CommunicatorLocal(grid);
  assert(comm != MPI_COMM_NULL);
  MPI_Allreduce(&value, &result, 1, MPI_INT, MPI_SUM, comm);
  return result;
}

vmg_int CommMPI::LevelSumRoot(const Grid& grid, vmg_int value)
{
  vmg_int result = 0;
  MPI_Comm comm = settings.CommunicatorLocal(grid);
  assert(comm != MPI_COMM_NULL);
  MPI_Reduce(&value, &result, 1, MPI_INT, MPI_SUM, 0, comm);
  return result;
}

void CommMPI::LevelSumArray(const Grid& grid, vmg_int* array, const vmg_int& size)
{
  MPI_Comm comm = settings.CommunicatorLocal(grid);
  assert(comm != MPI_COMM_NULL);
  MPI_Allreduce(MPI_IN_PLACE, array, size, MPI_INT, MPI_SUM, comm);
}

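// Rank-prefixed printf-style logging. Output messages are always printed;
// Info, Debug and Timing messages are enabled at compile time via the
// OUTPUT_INFO, OUTPUT_DEBUG and OUTPUT_TIMING macros.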
//TODO: Variable length buffer
void CommMPI::Print(const OutputLevel level, const char* format, ...)
{
  bool print = (level == Output);

#ifdef OUTPUT_INFO
  print |= (level == Info);
#endif
#ifdef OUTPUT_DEBUG
  print |= (level == Debug);
#endif
#ifdef OUTPUT_TIMING
  print |= (level == Timing);
#endif

  if (print) {
    va_list args;
    va_start(args, format);
    // vsnprintf truncates instead of overflowing the fixed-size buffer.
    vsnprintf(print_buffer, sizeof(print_buffer), format, args);
    printf("vmg: Rank %d: %s\n", GlobalRank(), print_buffer);
    va_end(args);
  }
}

void CommMPI::PrintOnce(const OutputLevel level, const char* format, ...)
{
  bool print = (level == Output);

#ifdef OUTPUT_INFO
  print |= (level == Info);
#endif
#ifdef OUTPUT_DEBUG
  print |= (level == Debug);
#endif
#ifdef OUTPUT_TIMING
  print |= (level == Timing);
#endif

  if (GlobalRank() == 0 && print) {
    va_list args;
    va_start(args, format);
    // vsnprintf truncates instead of overflowing the fixed-size buffer.
    vsnprintf(print_buffer, sizeof(print_buffer), format, args);
    printf("vmg: Rank %d: %s\n", GlobalRank(), print_buffer);
    va_end(args);
  }
}

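// PrintXML writes a file from the calling rank alone (MPI_COMM_SELF),
// prefixing the payload with a small XML header that records the number of
// processes. PrintXMLAll writes one shared file collectively: rank 0
// contributes the header via a shared write, then all ranks append their
// data with an ordered write.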
void CommMPI::PrintXML(const std::string& filename, const std::string& xml_data)
{
  MPI_File file;
  std::stringstream path, xml_header;

  pugi::xml_document doc;
  pugi::xml_node node_data = doc.append_child("Global").append_child("NumProcesses").append_child(pugi::node_pcdata);
  node_data.set_value(Helper::ToString(GlobalProcs().Product()).c_str());
  doc.save(xml_header);

  path << OutputPath() << filename;

  char* filename_array = Helper::GetCharArray(path.str());
  char* xml_header_array = Helper::GetCharArray(xml_header.str());
  char* str_array = Helper::GetCharArray(xml_data);

  MPI_File_open(MPI_COMM_SELF, filename_array, MPI_MODE_WRONLY|MPI_MODE_CREATE, MPI_INFO_NULL, &file);
  MPI_File_set_size(file, 0);
  MPI_File_write(file, xml_header_array, xml_header.str().size(), MPI_CHAR, MPI_STATUS_IGNORE);
  MPI_File_write(file, str_array, xml_data.size(), MPI_CHAR, MPI_STATUS_IGNORE);
  MPI_File_close(&file);

  delete [] filename_array;
  delete [] xml_header_array;
  delete [] str_array;
}

void CommMPI::PrintXMLAll(const std::string& filename, const std::string& xml_data)
{
  MPI_File file;
  std::stringstream path;

  path << OutputPath() << filename;

  char* filename_array = Helper::GetCharArray(path.str());
  char* str_array = Helper::GetCharArray(xml_data);

  MPI_File_open(comm_global, filename_array, MPI_MODE_WRONLY|MPI_MODE_CREATE, MPI_INFO_NULL, &file);
  MPI_File_set_size(file, 0);

  if (GlobalRank() == 0) {
    std::stringstream xml_header;
    pugi::xml_document doc;
    pugi::xml_node node_data = doc.append_child("Global").append_child("NumProcesses").append_child(pugi::node_pcdata);
    node_data.set_value(Helper::ToString(GlobalProcs().Product()).c_str());
    doc.save(xml_header);

    char* xml_header_array = Helper::GetCharArray(xml_header.str());

    MPI_File_write_shared(file, xml_header_array, xml_header.str().size(), MPI_CHAR, MPI_STATUS_IGNORE);

    delete [] xml_header_array;
  }

  MPI_File_write_ordered(file, str_array, xml_data.size(), MPI_CHAR, MPI_STATUS_IGNORE);
  MPI_File_close(&file);

  delete [] filename_array;
  delete [] str_array;
}

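// Appends a human-readable dump of the grid layout to `filename`: rank 0
// writes the global header, then every rank appends its local block through
// an ordered collective write.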
void CommMPI::PrintGridInformation(const Grid& grid, char* filename, const std::string& name)
{
  std::stringstream buf;
  MPI_File file;
  int rank, size;
  int size_local, size_local_max;

  MPI_Comm comm = settings.CommunicatorGlobal(grid);
  MPI_Comm comm_local = settings.CommunicatorLocal(grid);

  if (comm_local != MPI_COMM_NULL)
    MPI_Comm_size(comm_local, &size_local);
  else
    size_local = 0;

  if (comm != MPI_COMM_NULL) {

    MPI_Reduce(&size_local, &size_local_max, 1, MPI_INT, MPI_MAX, 0, comm);

    MPI_File_open(comm, filename, MPI_MODE_WRONLY|MPI_MODE_CREATE|MPI_MODE_APPEND, MPI_INFO_NULL, &file);

    MPI_Comm_rank(comm, &rank);
    MPI_Comm_size(comm, &size);

    if (rank == 0) {

      buf << "###########################################################" << std::endl
          << "GLOBAL INFORMATION:" << std::endl
          << "  NAME: " << name << std::endl
          << "  LEVEL: " << grid.Level() << std::endl
          << "  COMM_SIZE_GLOBAL: " << size << std::endl
          << "  COMM_SIZE_LOCAL: " << size_local_max << std::endl
          << "  GLOBAL:" << std::endl
          << "    GLOBAL_BEGIN: " << grid.Global().GlobalBegin() << std::endl
          << "    GLOBAL_END: " << grid.Global().GlobalEnd() << std::endl
          << "    GLOBAL_SIZE: " << grid.Global().GlobalSize() << std::endl
          << "    GLOBAL_BEGIN_FINEST: " << grid.Global().GlobalBeginFinest() << std::endl
          << "    GLOBAL_END_FINEST: " << grid.Global().GlobalEndFinest() << std::endl
          << "    GLOBAL_SIZE_FINEST: " << grid.Global().GlobalSizeFinest() << std::endl
          << "  EXTENT:" << std::endl
          << "    BEGIN: " << grid.Extent().Begin() << std::endl
          << "    END: " << grid.Extent().End() << std::endl
          << "    SIZE: " << grid.Extent().Size() << std::endl
          << "    MESH_WIDTH: " << grid.Extent().MeshWidth() << std::endl
          << std::endl
          << "LOCAL INFORMATION:" << std::endl;
    }

    buf << "RANK " << rank << ":" << std::endl
        << "  GLOBAL:" << std::endl
        << "    LOCAL_BEGIN: " << grid.Global().LocalBegin() << std::endl
        << "    LOCAL_END: " << grid.Global().LocalEnd() << std::endl
        << "    LOCAL_SIZE: " << grid.Global().LocalSize() << std::endl
        << "    BOUNDARY_TYPE: " << grid.Global().BoundaryType() << std::endl
        << "  LOCAL:" << std::endl
        << "    BEGIN: " << grid.Local().Begin() << std::endl
        << "    END: " << grid.Local().End() << std::endl
        << "    SIZE: " << grid.Local().Size() << std::endl
        << "    SIZE_TOTAL: " << grid.Local().SizeTotal() << std::endl
        << "    HALO_BEGIN_1: " << grid.Local().HaloBegin1() << std::endl
        << "    HALO_END_1: " << grid.Local().HaloEnd1() << std::endl
        << "    HALO_SIZE_1: " << grid.Local().HaloSize1() << std::endl
        << "    HALO_BEGIN_2: " << grid.Local().HaloBegin2() << std::endl
        << "    HALO_END_2: " << grid.Local().HaloEnd2() << std::endl
        << "    HALO_SIZE_2: " << grid.Local().HaloSize2() << std::endl
        << "    BOUNDARY_BEGIN_1: " << grid.Local().BoundaryBegin1() << std::endl
        << "    BOUNDARY_END_1: " << grid.Local().BoundaryEnd1() << std::endl
        << "    BOUNDARY_SIZE_1: " << grid.Local().BoundarySize1() << std::endl
        << "    BOUNDARY_BEGIN_2: " << grid.Local().BoundaryBegin2() << std::endl
        << "    BOUNDARY_END_2: " << grid.Local().BoundaryEnd2() << std::endl
        << "    BOUNDARY_SIZE_2: " << grid.Local().BoundarySize2() << std::endl;

    if (rank == size-1)
      buf << "###########################################################" << std::endl;

    char* char_buf = Helper::GetCharArray(buf.str());
    MPI_File_write_ordered(file, char_buf, buf.str().size(), MPI_CHAR, MPI_STATUS_IGNORE);
    delete [] char_buf;

    MPI_File_close(&file);
  }
}

void CommMPI::PrintDatatypes(char* filename)
{
  std::stringstream buf;
  MPI_File file;

  if (comm_global != MPI_COMM_NULL) {

    MPI_File_open(comm_global, filename, MPI_MODE_WRONLY|MPI_MODE_CREATE|MPI_MODE_APPEND, MPI_INFO_NULL, &file);

    buf << settings << std::endl;

    char* char_buf = Helper::GetCharArray(buf.str());
    MPI_File_write_ordered(file, char_buf, buf.str().size(), MPI_CHAR, MPI_STATUS_IGNORE);
    delete [] char_buf;

    MPI_File_close(&file);
  }
}

void CommMPI::PrintAllSettings()
{
  std::stringstream buf;
  MPI_File file;

  const Multigrid& mg = *MG::GetSol();

  buf << OutputPath() << "settings.txt";
  char *filename = Helper::GetCharArray(buf.str());

  MPI_File_open(comm_global, filename, MPI_MODE_WRONLY|MPI_MODE_CREATE, MPI_INFO_NULL, &file);
  MPI_File_set_size(file, 0);
  MPI_File_close(&file);

  for (int i=mg.MinLevel(); i<=mg.MaxLevel(); ++i)
    PrintGridInformation(mg(i), filename, "MULTIGRID");

  for (int i=mg.MinLevel()+1; i<=mg.MaxLevel(); ++i)
    PrintGridInformation(settings.CoarserGrid(mg(i)), filename, "COARSER_GRID");

  for (int i=mg.MinLevel(); i<mg.MaxLevel(); ++i)
    PrintGridInformation(settings.FinerGrid(mg(i)), filename, "FINER_GRID");

  PrintDatatypes(filename);

  delete [] filename;
}

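// Grid snapshot output. With VTK support each rank writes its local piece
// (including physical boundary nodes) as an ASCII .vti image file; without
// VTK a plain-text fallback serializes the values and delegates the file
// handling to CreateOutputFiles below.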
void CommMPI::PrintGrid(Grid& grid, const char* information)
{
  int output_count = OutputCount();

#ifdef HAVE_VTK

  if (settings.CommunicatorLocal(grid) != MPI_COMM_NULL) {

    vtkSmartPointer<vtkImageData> image = vtkSmartPointer<vtkImageData>::New();
    image->SetExtent(grid.Global().LocalBegin().X(), grid.Global().LocalEnd().X()-1,
                     grid.Global().LocalBegin().Y(), grid.Global().LocalEnd().Y()-1,
                     grid.Global().LocalBegin().Z(), grid.Global().LocalEnd().Z()-1);

    image->SetSpacing(grid.Extent().MeshWidth().vec());
    image->SetOrigin(grid.Extent().Begin().vec());
    image->SetScalarTypeToDouble();
    image->SetNumberOfScalarComponents(1);
    image->AllocateScalars();
    image->GetPointData()->GetScalars()->SetName(information);

    Index i;
    Index begin, end, offset;

    for (int j=0; j<3; ++j) {
      if (grid.Local().BoundarySize1()[j] > 0) {
        begin[j] = grid.Local().BoundaryBegin1()[j];
        offset[j] = grid.Global().LocalBegin()[j];
      } else {
        begin[j] = grid.Local().Begin()[j];
        offset[j] = grid.Global().LocalBegin()[j] - grid.Local().Begin()[j];
      }
      end[j] = (grid.Local().BoundarySize2()[j] > 0 ? grid.Local().BoundaryEnd2()[j] : grid.Local().End()[j]);
    }

    for (i.X()=begin.X(); i.X()<end.X(); ++i.X())
      for (i.Y()=begin.Y(); i.Y()<end.Y(); ++i.Y())
        for (i.Z()=begin.Z(); i.Z()<end.Z(); ++i.Z())
          image->SetScalarComponentFromDouble(i.X() + offset.X(),
                                              i.Y() + offset.Y(),
                                              i.Z() + offset.Z(),
                                              0, grid.GetVal(i));

    image->Update();

    int rank, size;
    MPI_Comm_rank(comm_global, &rank);
    MPI_Comm_size(comm_global, &size);

    std::stringstream filename;
    filename << OutputPath() << std::setw(4) << std::setfill('0') << output_count << "_" << rank << ".vti";

    vtkSmartPointer<vtkXMLImageDataWriter> writer = vtkSmartPointer<vtkXMLImageDataWriter>::New();
    writer->SetFileName(filename.str().c_str());
    writer->SetCompressorTypeToNone();
    writer->SetDataModeToAscii();
    writer->SetInput(image);
    writer->Update();
    writer->Write();

  }

//FIXME
#else /* HAVE_VTK */
  Index i;
  std::stringstream buf;

  Index begin, end;
  Index begin_local, end_local, begin_global, end_global;

  CommToGhosts(grid);

  for (int j=0; j<3; ++j) {
    end[j] = grid.Local().End()[j] + (grid.Global().LocalEnd()[j] == grid.Global().GlobalSize()[j] ? 0 : grid.Local().HaloSize1()[j]);
    end_local[j] = grid.Global().LocalEnd()[j] - (grid.Global().LocalEnd()[j] == grid.Global().GlobalSize()[j] ? 1 : 0);
  }

  begin = grid.Local().Begin();
  begin_local = grid.Global().LocalBegin();
  begin_global = 0;
  end_global = grid.Global().GlobalSize()-1;

  for (i.Z()=begin.Z(); i.Z()<end.Z(); ++i.Z())
    for (i.Y()=begin.Y(); i.Y()<end.Y(); ++i.Y())
      for (i.X()=begin.X(); i.X()<end.X(); ++i.X())
        buf << std::scientific << grid.GetVal(i) << " ";

  CreateOutputFiles(grid, buf, information,
                    begin_global, end_global,
                    begin_local, end_local,
                    output_count);
#endif /* HAVE_VTK */
}

void CommMPI::PrintDefect(Grid& sol, Grid& rhs, const char* information)
{
  TempGrid *temp = MG::GetTempGrid();
  temp->SetProperties(sol);
  temp->ImportFromResidual(sol, rhs);
  PrintGrid(*temp, information);
}

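// The fallback writer mimics VTK's parallel layout by hand: a single .pvti
// master file indexes one serial .vti piece per rank. CreateParallelOutputFile
// writes the master file collectively, CreateSerialOutputFile opens the
// per-rank piece and emits its XML header, and FinalizeSerialOutputFile
// closes the XML elements and the file.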
void CommMPI::CreateOutputFiles(const Grid& grid, const std::stringstream& serial_data, const char* information,
                                const Index& begin_global, const Index& end_global,
                                const Index& begin_local, const Index& end_local,
                                const int& output_count)
{
  MPI_Comm comm = settings.CommunicatorGlobal(grid);

  if (comm != MPI_COMM_NULL) {

    MPI_File file;
    std::string conv_information = Helper::ReplaceWhitespaces(information, "_");

    CreateParallelOutputFile(grid, comm, output_count, conv_information.c_str(),
                             begin_global, end_global, begin_local, end_local);

    file = CreateSerialOutputFile(grid, comm, output_count, conv_information.c_str(),
                                  begin_global, end_global, begin_local, end_local);

    char *char_buf = Helper::GetCharArray(serial_data.str());
    MPI_File_write(file, char_buf, serial_data.str().size(), MPI_CHAR, MPI_STATUS_IGNORE);
    delete [] char_buf;

    FinalizeSerialOutputFile(file);
  }
}

void CommMPI::CreateParallelOutputFile(const Grid& grid, MPI_Comm& comm,
                                       const int& output_count, const char* information,
                                       const Index& begin_global, const Index& end_global,
                                       const Index& begin_local, const Index& end_local)
{
  int rank;
  MPI_File file;
  char parallel_filename[513], serial_filename[513];
  std::stringstream buf;

  MPI_Comm_rank(comm, &rank);

  // snprintf guards against overrunning the fixed-size filename buffers.
  snprintf(parallel_filename, sizeof(parallel_filename), "%s%04d.pvti", OutputPath().c_str(), output_count);
  snprintf(serial_filename, sizeof(serial_filename), "%04d_%d.vti", output_count, rank);

  MPI_File_open(comm, parallel_filename, MPI_MODE_WRONLY|MPI_MODE_CREATE, MPI_INFO_NULL, &file);
  MPI_File_set_size(file, 0);

  if (rank == 0) {

    buf << "<?xml version=\"1.0\"?>" << std::endl
        << "<VTKFile type=\"PImageData\" version=\"0.1\" byte_order=\"LittleEndian\">" << std::endl
        << "  <PImageData WholeExtent=\"";

    for (int i=0; i<3; ++i)
      buf << begin_global[i] << " " << end_global[i] << " ";

    buf << "\"" << std::endl
        << "              GhostLevel=\"0\" Origin=\"0 0 0\" Spacing=\"";

    for (int i=0; i<3; ++i)
      buf << grid.Extent().MeshWidth()[i] << " ";

    buf << "\">" << std::endl
        << "    <PPointData Scalars=\"" << information << "\">" << std::endl
        << "      <PDataArray type=\"Float32\" Name=\"" << information << "\"/>" << std::endl
        << "    </PPointData>" << std::endl;

    char* char_buf = Helper::GetCharArray(buf.str());

    MPI_File_write_shared(file, char_buf, buf.str().size(), MPI_CHAR, MPI_STATUS_IGNORE);

    delete [] char_buf;
  }

  buf.str("");

  if ((end_local-begin_local).Product() > 0) {
    buf << "    <Piece Extent=\"";

    for (int i=0; i<3; ++i)
      buf << begin_local[i] << " " << end_local[i] << " ";

    buf << "\" Source=\"" << serial_filename << "\"/>" << std::endl;
  }

  char* char_buf = Helper::GetCharArray(buf.str());

  MPI_File_write_ordered(file, char_buf, buf.str().size(), MPI_CHAR, MPI_STATUS_IGNORE);

  delete [] char_buf;

  if (rank == 0) {

    buf.str("");

    buf << "  </PImageData>" << std::endl
        << "</VTKFile>" << std::endl;

    char* char_buf = Helper::GetCharArray(buf.str());

    MPI_File_write_shared(file, char_buf, buf.str().size(), MPI_CHAR, MPI_STATUS_IGNORE);

    delete [] char_buf;
  }

  MPI_File_close(&file);
}

MPI_File CommMPI::CreateSerialOutputFile(const Grid& grid, MPI_Comm& comm,
                                         const int& output_count, const char* information,
                                         const Index& begin_global, const Index& end_global,
                                         const Index& begin_local, const Index& end_local)
{
  char serial_filename[513];
  int rank;
  MPI_File file;
  std::stringstream buf;

  MPI_Comm_rank(comm_global, &rank);

  // snprintf guards against overrunning the fixed-size filename buffer.
  snprintf(serial_filename, sizeof(serial_filename), "%s%04d_%d.vti", OutputPath().c_str(), output_count, rank);

  MPI_File_open(MPI_COMM_SELF, serial_filename, MPI_MODE_WRONLY|MPI_MODE_CREATE, MPI_INFO_NULL, &file);

  buf << "<?xml version=\"1.0\"?>" << std::endl
      << "<VTKFile type=\"ImageData\" version=\"0.1\" byte_order=\"LittleEndian\">" << std::endl
      << "  <ImageData WholeExtent=\"";

  for (int i=0; i<3; ++i)
    buf << begin_global[i] << " " << end_global[i] << " ";

  buf << "\"" << std::endl
      << "             Origin=\"0 0 0\" Spacing=\"";

  for (int i=0; i<3; ++i)
    buf << grid.Extent().MeshWidth()[i] << " ";

  buf << "\">" << std::endl
      << "    <Piece Extent=\"";

  for (int i=0; i<3; ++i)
    buf << begin_local[i] << " " << end_local[i] << " ";

  buf << "\">" << std::endl
      << "      <PointData Scalars=\"" << information << "\">" << std::endl
      << "        <DataArray type=\"Float32\" Name=\"" << information << "\" format=\"ascii\">" << std::endl
      << "          ";

  char* char_buf = Helper::GetCharArray(buf.str());
  MPI_File_write(file, char_buf, buf.str().size(), MPI_CHAR, MPI_STATUS_IGNORE);
  delete [] char_buf;

  return file;
}

void CommMPI::FinalizeSerialOutputFile(MPI_File& file)
{
  std::stringstream buf;

  buf << std::endl
      << "        </DataArray>" << std::endl
      << "      </PointData>" << std::endl
      << "    </Piece>" << std::endl
      << "  </ImageData>" << std::endl
      << "</VTKFile>" << std::endl;

  char* char_buf = Helper::GetCharArray(buf.str());
  MPI_File_write(file, char_buf, buf.str().size(), MPI_CHAR, MPI_STATUS_IGNORE);
  delete [] char_buf;

  MPI_File_close(&file);
}

int CommMPI::GlobalRank() const
{
  int rank;
  MPI_Comm_rank(comm_global, &rank);
  return rank;
}

int CommMPI::GlobalSize() const
{
  int size;
  MPI_Comm_size(comm_global, &size);
  return size;
}

Index CommMPI::GlobalPos() const
{
  Index dims, periods, coords;
  MPI_Cart_get(comm_global, 3, dims.vec(), periods.vec(), coords.vec());
  return coords;
}

Index CommMPI::GlobalProcs() const
{
  Index dims, periods, coords;
  MPI_Cart_get(comm_global, 3, dims.vec(), periods.vec(), coords.vec());
  return dims;
}

int CommMPI::Rank(const Grid& grid) const
{
  int rank;
  MPI_Comm comm = settings.CommunicatorLocal(grid);
  assert(comm != MPI_COMM_NULL);
  MPI_Comm_rank(comm, &rank);
  return rank;
}

int CommMPI::Size(const Grid& grid) const
{
  int size;
  MPI_Comm comm = settings.CommunicatorLocal(grid);
  assert(comm != MPI_COMM_NULL);
  MPI_Comm_size(comm, &size);
  return size;
}

Index CommMPI::Pos(const Grid& grid) const
{
  Index dims, periods, coords;
  MPI_Comm comm = settings.CommunicatorLocal(grid);
  assert(comm != MPI_COMM_NULL);
  MPI_Cart_get(comm, 3, dims.vec(), periods.vec(), coords.vec());
  return coords;
}

Index CommMPI::Procs(const Grid& grid) const
{
  Index dims, periods, coords;
  MPI_Comm comm = settings.CommunicatorLocal(grid);
  assert(comm != MPI_COMM_NULL);
  MPI_Cart_get(comm, 3, dims.vec(), periods.vec(), coords.vec());
  return dims;
}

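// Adopts the communicator directly if it already carries a Cartesian
// topology; otherwise a 3d process grid is constructed first. For
// power-of-two sizes the dimensions are derived by splitting the exponent
// over the three axes (e.g. 8 = 2^3 ranks give a 2x2x2 grid, 32 = 2^5 a
// 4x4x2 grid); all other sizes are factorized by MPI_Dims_create.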
void CommMPI::InitCommMPI(const MPI_Comm& comm)
{
  int status, size, rank;
  int dims[3] = {0, 0, 0};
  int periods[3];

  for (int i=0; i<3; ++i)
    periods[i] = (BoundaryConditions()[i] == Periodic ? 1 : 0);

  MPI_Comm_size(comm, &size);
  MPI_Comm_rank(comm, &rank);

  MPI_Topo_test(comm, &status);

  if (status == MPI_CART) {

    comm_global = comm;

  } else {

    const int log2 = Helper::log_2(size);

    if (Helper::intpow(2, log2) == size) {
      for (int i=0; i<3; ++i)
        dims[i] = Helper::intpow(2, log2 / 3 + (log2%3 > i ? 1 : 0));
    } else {
      MPI_Dims_create(size, 3, dims);
    }

#ifdef OUTPUT_DEBUG
    if (rank == 0)
      std::printf("Process grid: %d %d %d\n", dims[0], dims[1], dims[2]);
#endif

    MPI_Cart_create(comm, 3, dims, periods, 1, &comm_global);

  }

  MPI_Info_create(&info);
  char key[] = "no_locks";
  char val[] = "true";
  MPI_Info_set(info, key, val);
}

CommMPI::~CommMPI()
{
  MPI_Comm_free(&comm_global);
#ifdef VMG_ONE_SIDED
  if (win_created)
    MPI_Win_free(&win);
#endif
  MPI_Info_free(&info);
}

Grid& CommMPI::GetCoarserGrid(Multigrid& multigrid)
{
  return settings.CoarserGrid(multigrid(multigrid.Level()));
}

Grid& CommMPI::GetFinerGrid(Multigrid& multigrid)
{
  return settings.FinerGrid(multigrid(multigrid.Level()));
}

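// Creates a timestamped directory below ./output/ on rank 0 (made unique
// with a numeric suffix if it already exists) and broadcasts the path so
// that all ranks write into the same directory. Without boost::filesystem
// the current working directory is used instead.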
std::string CommMPI::CreateOutputDirectory()
{
#ifdef HAVE_BOOST_FILESYSTEM_PATH_HPP
  std::string path, unique_path;
  std::stringstream unique_suffix;
  int suffix_counter = 0;
  char buffer[129];
  time_t rawtime;
  struct tm *timeinfo;
  int path_size;
  char* path_array;

  if (GlobalRank() == 0) {

    time(&rawtime);
    timeinfo = localtime(&rawtime);
    strftime(buffer, 128, "./output/%Y_%m_%d_%H_%M_%S/", timeinfo);
    path = buffer;

    if (!fs::exists("output"))
      fs::create_directory("output");

    unique_path = path;

    while (fs::exists(unique_path.c_str())) {

      unique_suffix.str("");
      unique_suffix << "_" << suffix_counter++ << "/";

      unique_path = path;
      unique_path.replace(unique_path.size()-1, 1, unique_suffix.str());

    }

    fs::create_directory(unique_path.c_str());

    path_size = unique_path.size() + 1;
    path_array = Helper::GetCharArray(unique_path);

    MPI_Bcast(&path_size, 1, MPI_INT, 0, comm_global);

  } else {

    MPI_Bcast(&path_size, 1, MPI_INT, 0, comm_global);
    path_array = new char[path_size];

  }

  MPI_Bcast(path_array, path_size, MPI_CHAR, 0, comm_global);

  unique_path = path_array;

  delete [] path_array;

  return unique_path;

#else

  return "./";

#endif
}

#endif /* HAVE_MPI */