source: src/comm/comm_mpi.cpp@ e271f0

Last change on this file since e271f0 was fcf7f6, checked in by Julian Iseringhausen <isering@…>, 14 years ago

vmg: Added license files and headers (GPLv3).

git-svn-id: https://svn.version.fz-juelich.de/scafacos/trunk@1812 5161e1c8-67bf-11de-9fd5-51895aff932f

/*
 * vmg - a versatile multigrid solver
 * Copyright (C) 2012 Institute for Numerical Simulation, University of Bonn
 *
 * vmg is free software: you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation, either version 3 of the License, or
 * (at your option) any later version.
 *
 * vmg is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program. If not, see <http://www.gnu.org/licenses/>.
 */

/**
 * @file comm_mpi.cpp
 * @author Julian Iseringhausen <isering@ins.uni-bonn.de>
 * @date Wed Jun 16 13:21:06 2010
 *
 * @brief Class for MPI-based communication.
 *
 */

#ifdef HAVE_CONFIG_H
#include <config.h>
#endif

#ifdef HAVE_MPI

#include <mpi.h>
#ifdef HAVE_MARMOT
#include <enhancempicalls.h>
#include <sourceinfompicalls.h>
#endif

#ifdef HAVE_BOOST_FILESYSTEM
#include <boost/filesystem.hpp>
namespace fs = boost::filesystem;
#endif

#ifdef HAVE_VTK
#include <vtkAbstractArray.h>
#include <vtkImageData.h>
#include <vtkPointData.h>
#include <vtkSmartPointer.h>
#include <vtkXMLImageDataWriter.h>
#endif

#include <cassert>
#include <cstdarg>
#include <cstdio>
#include <cstring>
#include <ctime>
#include <iomanip>
#include <sstream>

#include "base/helper.hpp"
#include "base/tuple.hpp"
#include "comm/comm_mpi.hpp"
#include "comm/mpi/datatypes_local.hpp"
#include "grid/grid.hpp"
#include "grid/multigrid.hpp"
#include "grid/tempgrid.hpp"
#include "mg.hpp"
#include "base/timer.hpp"

// Shared scratch buffer for formatted console output (see PrintString below).
static char print_buffer[512];

using namespace VMG;
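
// Helpers that post one non-blocking send/receive per datatype in the given
// list, handing each operation a request slot from Request() so that
// WaitAll()/TestAll() can complete them later.  The *Buffered variants go
// through the datatypes' internal pack buffers; ReplaceBufferAll and
// AddBufferAll copy or accumulate those buffers back into the grid once the
// requests have completed.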
void CommMPI::IsendAll(Grid& grid, std::vector<VMG::MPI::Datatype>& types, const MPI_Comm& comm, const int& tag_start)
{
  for (std::vector<VMG::MPI::Datatype>::const_iterator iter=types.begin(); iter!=types.end(); ++iter)
    iter->Isend(grid, tag_start, comm, Request());
}

void CommMPI::IrecvAll(Grid& grid, std::vector<VMG::MPI::Datatype>& types, const MPI_Comm& comm, const int& tag_start)
{
  for (std::vector<VMG::MPI::Datatype>::const_iterator iter=types.begin(); iter!=types.end(); ++iter)
    iter->Irecv(grid, tag_start, comm, Request());
}

void CommMPI::IsendAllBuffered(const Grid& grid, std::vector<VMG::MPI::Datatype>& types, const MPI_Comm& comm, const int& tag_start)
{
  for (std::vector<VMG::MPI::Datatype>::iterator iter=types.begin(); iter!=types.end(); ++iter)
    iter->IsendBuffered(grid, tag_start, comm, Request());
}

void CommMPI::IrecvAllBuffered(std::vector<VMG::MPI::Datatype>& types, const MPI_Comm& comm, const int& tag_start)
{
  for (std::vector<VMG::MPI::Datatype>::iterator iter=types.begin(); iter!=types.end(); ++iter)
    iter->IrecvBuffered(tag_start, comm, Request());
}

void CommMPI::ReplaceBufferAll(Grid& grid, const std::vector<VMG::MPI::Datatype>& types)
{
  for (std::vector<VMG::MPI::Datatype>::const_iterator iter=types.begin(); iter!=types.end(); ++iter)
    iter->GridReplace(grid);
}

void CommMPI::AddBufferAll(Grid& grid, const std::vector<VMG::MPI::Datatype>& types)
{
  for (std::vector<VMG::MPI::Datatype>::const_iterator iter=types.begin(); iter!=types.end(); ++iter)
    iter->GridSum(grid);
}
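
// Redistribute grid data from grid_old to grid_new using the precomputed
// global datatypes: every rank posts receives for its part of grid_new and
// sends its part of grid_old.  CommSubgrid overwrites the target points,
// CommAddSubgrid accumulates into them.  The tag values used here and below
// (0411, 1806, 2310, ...) only serve to keep the communication phases apart.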
void CommMPI::CommSubgrid(Grid& grid_old, Grid& grid_new, const int& direction)
{
  MPI_Comm comm = settings.CommunicatorGlobal(grid_old);
  if (comm != MPI_COMM_NULL) {
    VMG::MPI::DatatypesGlobal& datatypes = settings.DatatypesGlobal(grid_old, grid_new, direction);
    IrecvAllBuffered(datatypes.Receive(), comm, 0411);
    IsendAllBuffered(grid_old, datatypes.Send(), comm, 0411);
    WaitAll();
    ReplaceBufferAll(grid_new, datatypes.Receive());
  }
}

void CommMPI::CommAddSubgrid(Grid& grid_old, Grid& grid_new, const int& direction)
{
  MPI_Comm comm = settings.CommunicatorGlobal(grid_old);
  if (comm != MPI_COMM_NULL) {
    VMG::MPI::DatatypesGlobal& datatypes = settings.DatatypesGlobal(grid_old, grid_new, direction);
    IrecvAllBuffered(datatypes.Receive(), comm, 1806);
    IsendAllBuffered(grid_old, datatypes.Send(), comm, 1806);
    WaitAll();
    AddBufferAll(grid_new, datatypes.Receive());
  }
}
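
// Halo exchange within one grid level: CommToGhosts fills the ghost (halo)
// regions from the neighbors' interior points, CommFromGhosts sums halo
// contributions back onto the neighbors' interior points.  The Async*
// variants split this into a start phase that posts the transfers and a
// finish phase that waits and unpacks, so computation can overlap
// communication.  Note that the tag 0xc0ffee exceeds the minimum MPI_TAG_UB
// of 32767 guaranteed by the MPI standard, so it relies on the MPI
// implementation providing a larger tag range.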
void CommMPI::CommToGhosts(Grid& grid)
{
  MPI_Comm comm = settings.CommunicatorLocal(grid);
  if (comm != MPI_COMM_NULL) {
    VMG::MPI::DatatypesLocal& types = settings.DatatypesLocal(grid);
    IrecvAllBuffered(types.Halo(), comm, 2310);
    IsendAllBuffered(grid, types.NB(), comm, 2310);
    WaitAll();
    ReplaceBufferAll(grid, types.Halo());
  }
}

void CommMPI::CommToGhostsAsyncStart(Grid& grid)
{
  MPI_Comm comm = settings.CommunicatorLocal(grid);
  if (comm != MPI_COMM_NULL) {
    VMG::MPI::DatatypesLocal& types = settings.DatatypesLocal(grid);
    IrecvAllBuffered(types.Halo(), comm, 2412);
    IsendAllBuffered(grid, types.NB(), comm, 2412);
    TestAll();
  }
}

void CommMPI::CommToGhostsAsyncFinish(Grid& grid)
{
  WaitAll();
  ReplaceBufferAll(grid, settings.DatatypesLocal(grid).Halo());
}

void CommMPI::CommFromGhosts(Grid& grid)
{
  MPI_Comm comm = settings.CommunicatorLocal(grid);
  if (comm != MPI_COMM_NULL) {
    VMG::MPI::DatatypesLocal& types = settings.DatatypesLocal(grid);
    IrecvAllBuffered(types.NB(), comm, 1337);
    IsendAllBuffered(grid, types.Halo(), comm, 1337);
    WaitAll();
    AddBufferAll(grid, types.NB());
  }
}

void CommMPI::CommFromGhostsAsyncStart(Grid& grid)
{
  MPI_Comm comm = settings.CommunicatorLocal(grid);
  if (comm != MPI_COMM_NULL) {
    VMG::MPI::DatatypesLocal& types = settings.DatatypesLocal(grid);
    IrecvAllBuffered(types.NB(), comm, 0xc0ffee);
    IsendAllBuffered(grid, types.Halo(), comm, 0xc0ffee);
    TestAll();
  }
}

void CommMPI::CommFromGhostsAsyncFinish(Grid& grid)
{
  WaitAll();
  AddBufferAll(grid, settings.DatatypesLocal(grid).NB());
}
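
// Global and per-level reductions.  These wrappers assume that vmg_float maps
// to MPI_DOUBLE and vmg_int to MPI_INT.  The *Root variants reduce onto rank 0
// only; the Level* variants reduce over the level-local communicator of the
// given grid.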
vmg_float CommMPI::GlobalSum(const vmg_float& value)
{
  vmg_float result = value;
  MPI_Allreduce(MPI_IN_PLACE, &result, 1, MPI_DOUBLE, MPI_SUM, comm_global);
  return result;
}

vmg_float CommMPI::GlobalSumRoot(const vmg_float& value)
{
  vmg_float recv_buffer = value;
  vmg_float send_buffer = value;
  MPI_Reduce(&send_buffer, &recv_buffer, 1, MPI_DOUBLE, MPI_SUM, 0, comm_global);
  return recv_buffer;
}

void CommMPI::GlobalSumArray(vmg_float* array, const vmg_int& size)
{
  MPI_Allreduce(MPI_IN_PLACE, array, size, MPI_DOUBLE, MPI_SUM, comm_global);
}

vmg_int CommMPI::GlobalSum(const vmg_int& value)
{
  vmg_int result = value;
  MPI_Allreduce(MPI_IN_PLACE, &result, 1, MPI_INT, MPI_SUM, comm_global);
  return result;
}

vmg_int CommMPI::GlobalSumRoot(const vmg_int& value)
{
  vmg_int recv_buffer = value;
  vmg_int send_buffer = value;
  MPI_Reduce(&send_buffer, &recv_buffer, 1, MPI_INT, MPI_SUM, 0, comm_global);
  return recv_buffer;
}

void CommMPI::GlobalSumArray(vmg_int* array, const vmg_int& size)
{
  MPI_Allreduce(MPI_IN_PLACE, array, size, MPI_INT, MPI_SUM, comm_global);
}

vmg_float CommMPI::GlobalMax(const vmg_float& value)
{
  vmg_float result = value;
  MPI_Allreduce(MPI_IN_PLACE, &result, 1, MPI_DOUBLE, MPI_MAX, comm_global);
  return result;
}

vmg_float CommMPI::GlobalMaxRoot(const vmg_float& value)
{
  vmg_float recv_buffer = value;
  vmg_float send_buffer = value;
  MPI_Reduce(&send_buffer, &recv_buffer, 1, MPI_DOUBLE, MPI_MAX, 0, comm_global);
  return recv_buffer;
}

void CommMPI::GlobalMaxArray(vmg_float* array, const vmg_int& size)
{
  MPI_Allreduce(MPI_IN_PLACE, array, size, MPI_DOUBLE, MPI_MAX, comm_global);
}

vmg_int CommMPI::GlobalMax(const vmg_int& value)
{
  vmg_int result = value;
  MPI_Allreduce(MPI_IN_PLACE, &result, 1, MPI_INT, MPI_MAX, comm_global);
  return result;
}

vmg_int CommMPI::GlobalMaxRoot(const vmg_int& value)
{
  vmg_int recv_buffer = value;
  vmg_int send_buffer = value;
  MPI_Reduce(&send_buffer, &recv_buffer, 1, MPI_INT, MPI_MAX, 0, comm_global);
  return recv_buffer;
}

void CommMPI::GlobalMaxArray(vmg_int* array, const vmg_int& size)
{
  MPI_Allreduce(MPI_IN_PLACE, array, size, MPI_INT, MPI_MAX, comm_global);
}

vmg_float CommMPI::LevelSum(const Grid& grid, const vmg_float& value)
{
  vmg_float result = value;
  MPI_Comm comm = settings.CommunicatorLocal(grid);
  assert(comm != MPI_COMM_NULL);
  MPI_Allreduce(MPI_IN_PLACE, &result, 1, MPI_DOUBLE, MPI_SUM, comm);
  return result;
}

vmg_float CommMPI::LevelSumRoot(const Grid& grid, const vmg_float& value)
{
  vmg_float recv_buffer = value;
  vmg_float send_buffer = value;
  MPI_Comm comm = settings.CommunicatorLocal(grid);
  assert(comm != MPI_COMM_NULL);
  MPI_Reduce(&send_buffer, &recv_buffer, 1, MPI_DOUBLE, MPI_SUM, 0, comm);
  return recv_buffer;
}

void CommMPI::LevelSumArray(const Grid& grid, vmg_float* array, const vmg_int& size)
{
  MPI_Comm comm = settings.CommunicatorLocal(grid);
  assert(comm != MPI_COMM_NULL);
  MPI_Allreduce(MPI_IN_PLACE, array, size, MPI_DOUBLE, MPI_SUM, comm);
}

vmg_int CommMPI::LevelSum(const Grid& grid, const vmg_int& value)
{
  vmg_int result = value;
  MPI_Comm comm = settings.CommunicatorLocal(grid);
  assert(comm != MPI_COMM_NULL);
  MPI_Allreduce(MPI_IN_PLACE, &result, 1, MPI_INT, MPI_SUM, comm);
  return result;
}

vmg_int CommMPI::LevelSumRoot(const Grid& grid, const vmg_int& value)
{
  vmg_int recv_buffer = value;
  vmg_int send_buffer = value;
  MPI_Comm comm = settings.CommunicatorLocal(grid);
  assert(comm != MPI_COMM_NULL);
  MPI_Reduce(&send_buffer, &recv_buffer, 1, MPI_INT, MPI_SUM, 0, comm);
  return recv_buffer;
}

void CommMPI::LevelSumArray(const Grid& grid, vmg_int* array, const vmg_int& size)
{
  MPI_Comm comm = settings.CommunicatorLocal(grid);
  assert(comm != MPI_COMM_NULL);
  MPI_Allreduce(MPI_IN_PLACE, array, size, MPI_INT, MPI_SUM, comm);
}
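
// Formatted console output, prefixed with the global rank.  Both helpers
// format into the shared static print_buffer, which is fine for occasional
// diagnostics but not reentrant or thread-safe.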
void CommMPI::PrintString(const char* format, ...)
{
  va_list args;
  va_start(args, format);
  // vsnprintf instead of vsprintf: truncate rather than overflow the shared buffer.
  vsnprintf(print_buffer, sizeof(print_buffer), format, args);
  printf("VMG: Rank %d: %s\n", GlobalRank(), print_buffer);
  va_end(args);
}

void CommMPI::PrintStringOnce(const char* format, ...)
{
  if (GlobalRank() == 0) {
    va_list args;
    va_start(args, format);
    vsnprintf(print_buffer, sizeof(print_buffer), format, args);
    printf("VMG: Rank %d: %s\n", GlobalRank(), print_buffer);
    va_end(args);
  }
}
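
// XML output via MPI-IO: PrintXML writes a file from a single rank
// (MPI_COMM_SELF), while PrintXMLAll lets every rank append its data in rank
// order (MPI_File_write_ordered) after rank 0 has written a small header
// recording the number of processes.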
void CommMPI::PrintXML(const std::string& filename, const std::string& xml_data)
{
  MPI_File file;
  std::stringstream path, xml_header;

  pugi::xml_document doc;
  pugi::xml_node node_data = doc.append_child("Global").append_child("NumProcesses").append_child(pugi::node_pcdata);
  node_data.set_value(Helper::ToString(GlobalProcs().Product()).c_str());
  doc.save(xml_header);

  path << OutputPath() << filename;

  char* filename_array = Helper::GetCharArray(path.str());
  char* xml_header_array = Helper::GetCharArray(xml_header.str());
  char* str_array = Helper::GetCharArray(xml_data);

  MPI_File_open(MPI_COMM_SELF, filename_array, MPI_MODE_WRONLY|MPI_MODE_CREATE, MPI_INFO_NULL, &file);
  MPI_File_set_size(file, 0);
  MPI_File_write(file, xml_header_array, xml_header.str().size(), MPI_CHAR, MPI_STATUS_IGNORE);
  MPI_File_write(file, str_array, xml_data.size(), MPI_CHAR, MPI_STATUS_IGNORE);
  MPI_File_close(&file);

  delete [] filename_array;
  delete [] xml_header_array;
  delete [] str_array;
}

void CommMPI::PrintXMLAll(const std::string& filename, const std::string& xml_data)
{
  MPI_File file;
  std::stringstream path;

  path << OutputPath() << filename;

  char* filename_array = Helper::GetCharArray(path.str());
  char* str_array = Helper::GetCharArray(xml_data);

  MPI_File_open(comm_global, filename_array, MPI_MODE_WRONLY|MPI_MODE_CREATE, MPI_INFO_NULL, &file);
  MPI_File_set_size(file, 0);

  if (GlobalRank() == 0) {
    std::stringstream xml_header;
    pugi::xml_document doc;
    pugi::xml_node node_data = doc.append_child("Global").append_child("NumProcesses").append_child(pugi::node_pcdata);
    node_data.set_value(Helper::ToString(GlobalProcs().Product()).c_str());
    doc.save(xml_header);

    char* xml_header_array = Helper::GetCharArray(xml_header.str());

    MPI_File_write_shared(file, xml_header_array, xml_header.str().size(), MPI_CHAR, MPI_STATUS_IGNORE);

    delete [] xml_header_array;
  }

  MPI_File_write_ordered(file, str_array, xml_data.size(), MPI_CHAR, MPI_STATUS_IGNORE);
  MPI_File_close(&file);

  delete [] filename_array;
  delete [] str_array;
}
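
// Append a human-readable dump of the global and per-rank grid layout
// (extents, halo and boundary regions, finer-grid overlaps) to the given
// file, using ordered MPI-IO writes so the per-rank blocks appear in rank
// order.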
void CommMPI::PrintGridInformation(const Grid& grid, char* filename, const std::string& name)
{
  std::stringstream buf;
  MPI_File file;
  int rank, size;
  int size_local, size_local_max;

  MPI_Comm comm = settings.CommunicatorGlobal(grid);
  MPI_Comm comm_local = settings.CommunicatorLocal(grid);

  if (comm_local != MPI_COMM_NULL)
    MPI_Comm_size(comm_local, &size_local);
  else
    size_local = 0;

  if (comm != MPI_COMM_NULL) {

    MPI_Reduce(&size_local, &size_local_max, 1, MPI_INT, MPI_MAX, 0, comm);

    MPI_File_open(comm, filename, MPI_MODE_WRONLY|MPI_MODE_CREATE|MPI_MODE_APPEND, MPI_INFO_NULL, &file);

    MPI_Comm_rank(comm, &rank);
    MPI_Comm_size(comm, &size);

    if (rank == 0) {

      buf << "###########################################################" << std::endl
          << "GLOBAL INFORMATION:" << std::endl
          << " NAME: " << name << std::endl
          << " LEVEL: " << grid.Level() << std::endl
          << " COMM_SIZE_GLOBAL: " << size << std::endl
          << " COMM_SIZE_LOCAL: " << size_local_max << std::endl
          << " GLOBAL:" << std::endl
          << " GLOBAL_FINER_BEGIN: " << grid.Global().GlobalFinerBegin() << std::endl
          << " GLOBAL_FINER_END: " << grid.Global().GlobalFinerEnd() << std::endl
          << " GLOBAL_FINER_SIZE: " << grid.Global().GlobalFinerSize() << std::endl
          << " FINEST_ABS_BEGIN: " << grid.Global().FinestAbsBegin() << std::endl
          << " FINEST_ABS_END: " << grid.Global().FinestAbsEnd() << std::endl
          << " FINEST_ABS_SIZE: " << grid.Global().FinestAbsSize() << std::endl
          << " GLOBAL_SIZE: " << grid.Global().GlobalSize() << std::endl
          << " EXTENT:" << std::endl
          << " BEGIN: " << grid.Extent().Begin() << std::endl
          << " END: " << grid.Extent().End() << std::endl
          << " SIZE: " << grid.Extent().Size() << std::endl
          << " MESH_WIDTH: " << grid.Extent().MeshWidth() << std::endl
          << std::endl
          << "LOCAL INFORMATION:" << std::endl;
    }

    buf << "RANK " << rank << ":" << std::endl
        << " GLOBAL:" << std::endl
        << " LOCAL_BEGIN: " << grid.Global().LocalBegin() << std::endl
        << " LOCAL_END: " << grid.Global().LocalEnd() << std::endl
        << " LOCAL_SIZE: " << grid.Global().LocalSize() << std::endl
        << " LOCAL_FINER_BEGIN: " << grid.Global().LocalFinerBegin() << std::endl
        << " LOCAL_FINER_END: " << grid.Global().LocalFinerEnd() << std::endl
        << " LOCAL_FINER_SIZE: " << grid.Global().LocalFinerSize() << std::endl
        << " BOUNDARY_TYPE: " << grid.Global().BoundaryType() << std::endl
        << " LOCAL:" << std::endl
        << " BEGIN: " << grid.Local().Begin() << std::endl
        << " END: " << grid.Local().End() << std::endl
        << " SIZE: " << grid.Local().Size() << std::endl
        << " SIZE_TOTAL: " << grid.Local().SizeTotal() << std::endl
        << " HALO_BEGIN_1: " << grid.Local().HaloBegin1() << std::endl
        << " HALO_END_1: " << grid.Local().HaloEnd1() << std::endl
        << " HALO_SIZE_1: " << grid.Local().HaloSize1() << std::endl
        << " HALO_BEGIN_2: " << grid.Local().HaloBegin2() << std::endl
        << " HALO_END_2: " << grid.Local().HaloEnd2() << std::endl
        << " HALO_SIZE_2: " << grid.Local().HaloSize2() << std::endl
        << " BOUNDARY_BEGIN_1: " << grid.Local().BoundaryBegin1() << std::endl
        << " BOUNDARY_END_1: " << grid.Local().BoundaryEnd1() << std::endl
        << " BOUNDARY_SIZE_1: " << grid.Local().BoundarySize1() << std::endl
        << " BOUNDARY_BEGIN_2: " << grid.Local().BoundaryBegin2() << std::endl
        << " BOUNDARY_END_2: " << grid.Local().BoundaryEnd2() << std::endl
        << " BOUNDARY_SIZE_2: " << grid.Local().BoundarySize2() << std::endl
        << " FINER_BEGIN: " << grid.Local().FinerBegin() << std::endl
        << " FINER_END: " << grid.Local().FinerEnd() << std::endl
        << " FINER_SIZE: " << grid.Local().FinerSize() << std::endl;

    if (rank == size-1)
      buf << "###########################################################" << std::endl;

    char* char_buf = Helper::GetCharArray(buf.str());
    MPI_File_write_ordered(file, char_buf, buf.str().size(), MPI_CHAR, MPI_STATUS_IGNORE);
    delete [] char_buf;

    MPI_File_close(&file);

  }
}
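
// Write settings.txt into the output directory and dump the layout of every
// multigrid level as well as the associated coarser/finer auxiliary grids.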
void CommMPI::PrintAllSettings()
{
  std::stringstream buf;
  MPI_File file;

  const Multigrid& mg = *MG::GetSol();

  buf << OutputPath() << "settings.txt";
  char *filename = Helper::GetCharArray(buf.str());

  MPI_File_open(comm_global, filename, MPI_MODE_WRONLY|MPI_MODE_CREATE, MPI_INFO_NULL, &file);
  MPI_File_set_size(file, 0);
  MPI_File_close(&file);

  for (int i=mg.MinLevel(); i<=mg.MaxLevel(); ++i)
    PrintGridInformation(mg(i), filename, "MULTIGRID");

  for (int i=mg.MinLevel()+1; i<=mg.MaxLevel(); ++i)
    PrintGridInformation(settings.CoarserGrid(mg(i)), filename, "COARSER_GRID");

  for (int i=mg.MinLevel(); i<mg.MaxLevel(); ++i)
    PrintGridInformation(settings.FinerGrid(mg(i)), filename, "FINER_GRID");

  delete [] filename;
}
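
// Write the grid values to disk for visualization.  With VTK available each
// rank writes its local block as a .vti image file; otherwise the values are
// serialized by hand and CreateOutputFiles produces an equivalent .pvti/.vti
// pair.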
void CommMPI::PrintGrid(Grid& grid, const char* information)
{
  int output_count = OutputCount();

#ifdef HAVE_VTK

  if (settings.CommunicatorLocal(grid) != MPI_COMM_NULL) {

    Index end, end_global;

    for (int i=0; i<3; ++i) {
      end[i] = grid.Local().End()[i];
      end_global[i] = grid.Global().LocalEnd()[i];
    }

    vtkSmartPointer<vtkImageData> image = vtkSmartPointer<vtkImageData>::New();
    image->SetExtent(grid.Global().LocalBegin().X(), end_global.X()-1,
                     grid.Global().LocalBegin().Y(), end_global.Y()-1,
                     grid.Global().LocalBegin().Z(), end_global.Z()-1);
    image->SetSpacing(grid.Extent().MeshWidth().vec());
    image->SetOrigin(grid.Extent().Begin().vec());
    image->SetScalarTypeToDouble();
    image->SetNumberOfScalarComponents(1);
    image->AllocateScalars();
    image->GetPointData()->GetScalars()->SetName(information);

    Index i;
    for (i.X()=grid.Local().Begin().X(); i.X()<end.X(); ++i.X())
      for (i.Y()=grid.Local().Begin().Y(); i.Y()<end.Y(); ++i.Y())
        for (i.Z()=grid.Local().Begin().Z(); i.Z()<end.Z(); ++i.Z())
          image->SetScalarComponentFromDouble(i.X() - grid.Local().Begin().X() + grid.Global().LocalBegin().X(),
                                              i.Y() - grid.Local().Begin().Y() + grid.Global().LocalBegin().Y(),
                                              i.Z() - grid.Local().Begin().Z() + grid.Global().LocalBegin().Z(),
                                              0, grid.GetVal(i));

    image->Update();

    int rank, size;
    MPI_Comm_rank(comm_global, &rank);
    MPI_Comm_size(comm_global, &size);

    std::stringstream filename;
    filename << OutputPath() << std::setw(4) << std::setfill('0') << output_count << "_" << rank << ".vti";

    vtkSmartPointer<vtkXMLImageDataWriter> writer = vtkSmartPointer<vtkXMLImageDataWriter>::New();
    writer->SetFileName(filename.str().c_str());
    writer->SetCompressorTypeToNone();
    writer->SetDataModeToAscii();
    writer->SetInput(image);
    writer->Update();
    writer->Write();

  }

#else /* HAVE_VTK */
  Index i;
  std::stringstream buf;

  Index begin, end;
  Index begin_local, end_local, begin_global, end_global;

  CommToGhosts(grid);

  for (int i=0; i<3; ++i) {
    end[i] = grid.Local().End()[i] + (grid.Global().LocalEnd()[i] == grid.Global().GlobalSize()[i] ? 0 : grid.Local().HaloSize1()[i]);
    end_local[i] = grid.Global().LocalEnd()[i] - (grid.Global().LocalEnd()[i] == grid.Global().GlobalSize()[i] ? 1 : 0);
  }

  begin = grid.Local().Begin();
  begin_local = grid.Global().LocalBegin();
  begin_global = 0;
  end_global = grid.Global().GlobalSize()-1;

  for (i.Z()=begin.Z(); i.Z()<end.Z(); ++i.Z())
    for (i.Y()=begin.Y(); i.Y()<end.Y(); ++i.Y())
      for (i.X()=begin.X(); i.X()<end.X(); ++i.X())
        buf << std::scientific << grid.GetVal(i) << " ";

  CreateOutputFiles(grid, buf, information,
                    begin_global, end_global,
                    begin_local, end_local,
                    output_count);
#endif /* HAVE_VTK */
}
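
// Print the current defect: the residual of (sol, rhs) is imported into a
// temporary grid, which is then passed to PrintGrid.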
void CommMPI::PrintDefect(Grid& sol, Grid& rhs, const char* information)
{
  TempGrid *temp = MG::GetTempGrid();
  temp->SetProperties(sol);
  temp->ImportFromResidual(sol, rhs);
  PrintGrid(*temp, information);
}
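
// Glue for the non-VTK output path: create the parallel .pvti master file,
// open the per-rank .vti piece, write the serialized grid data into it and
// finalize the XML structure.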
void CommMPI::CreateOutputFiles(const Grid& grid, const std::stringstream& serial_data, const char* information,
                                const Index& begin_global, const Index& end_global,
                                const Index& begin_local, const Index& end_local,
                                const int& output_count)
{
  MPI_Comm comm = settings.CommunicatorGlobal(grid);

  if (comm != MPI_COMM_NULL) {

    MPI_File file;
    std::string conv_information = Helper::ReplaceWhitespaces(information, "_");

    CreateParallelOutputFile(grid, comm, output_count, conv_information.c_str(),
                             begin_global, end_global, begin_local, end_local);

    file = CreateSerialOutputFile(grid, comm, output_count, conv_information.c_str(),
                                  begin_global, end_global, begin_local, end_local);

    char *char_buf = Helper::GetCharArray(serial_data.str());
    MPI_File_write(file, char_buf, serial_data.str().size(), MPI_CHAR, MPI_STATUS_IGNORE);
    delete [] char_buf;

    FinalizeSerialOutputFile(file);

  }
}
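
// Rank 0 writes the header and footer of the .pvti master file; every rank
// with a non-empty local piece contributes one <Piece> entry in rank order
// via MPI_File_write_ordered.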
void CommMPI::CreateParallelOutputFile(const Grid& grid, MPI_Comm& comm,
                                       const int& output_count, const char* information,
                                       const Index& begin_global, const Index& end_global,
                                       const Index& begin_local, const Index& end_local)
{
  int rank;
  MPI_File file;
  char parallel_filename[513], serial_filename[513];
  std::stringstream buf;

  MPI_Comm_rank(comm, &rank);

  // snprintf instead of sprintf: a long output path must not overflow the fixed-size buffers.
  snprintf(parallel_filename, sizeof(parallel_filename), "%s%04d.pvti", OutputPath().c_str(), output_count);
  snprintf(serial_filename, sizeof(serial_filename), "%04d_%d.vti", output_count, rank);

  MPI_File_open(comm, parallel_filename, MPI_MODE_WRONLY|MPI_MODE_CREATE, MPI_INFO_NULL, &file);
  MPI_File_set_size(file, 0);

  if (rank == 0) {

    buf << "<?xml version=\"1.0\"?>" << std::endl
        << "<VTKFile type=\"PImageData\" version=\"0.1\" byte_order=\"LittleEndian\">" << std::endl
        << " <PImageData WholeExtent=\"";

    for (int i=0; i<3; ++i)
      buf << begin_global[i] << " " << end_global[i] << " ";

    buf << "\"" << std::endl
        << " GhostLevel=\"0\" Origin=\"0 0 0\" Spacing=\"";

    for (int i=0; i<3; ++i)
      buf << grid.Extent().MeshWidth()[i] << " ";

    buf << "\">" << std::endl
        << " <PPointData Scalars=\"" << information << "\">" << std::endl
        << " <PDataArray type=\"Float32\" Name=\"" << information << "\"/>" << std::endl
        << " </PPointData>" << std::endl;

    char* char_buf = Helper::GetCharArray(buf.str());

    MPI_File_write_shared(file, char_buf, buf.str().size(), MPI_CHAR, MPI_STATUS_IGNORE);

    delete [] char_buf;
  }

  buf.str("");

  if ((end_local-begin_local).Product() > 0) {
    buf << " <Piece Extent=\"";

    for (int i=0; i<3; ++i)
      buf << begin_local[i] << " " << end_local[i] << " ";

    buf << "\" Source=\"" << serial_filename << "\"/>" << std::endl;
  }

  char* char_buf = Helper::GetCharArray(buf.str());

  MPI_File_write_ordered(file, char_buf, buf.str().size(), MPI_CHAR, MPI_STATUS_IGNORE);

  delete [] char_buf;

  if (rank == 0) {

    buf.str("");

    buf << " </PImageData>" << std::endl
        << "</VTKFile>" << std::endl;

    char* char_buf = Helper::GetCharArray(buf.str());

    MPI_File_write_shared(file, char_buf, buf.str().size(), MPI_CHAR, MPI_STATUS_IGNORE);

    delete [] char_buf;
  }

  MPI_File_close(&file);
}

MPI_File CommMPI::CreateSerialOutputFile(const Grid& grid, MPI_Comm& comm,
                                         const int& output_count, const char* information,
                                         const Index& begin_global, const Index& end_global,
                                         const Index& begin_local, const Index& end_local)
{
  char serial_filename[513];
  int rank;
  MPI_File file;
  std::stringstream buf;

  MPI_Comm_rank(comm_global, &rank);

  snprintf(serial_filename, sizeof(serial_filename), "%s%04d_%d.vti", OutputPath().c_str(), output_count, rank);

  MPI_File_open(MPI_COMM_SELF, serial_filename, MPI_MODE_WRONLY|MPI_MODE_CREATE, MPI_INFO_NULL, &file);

  buf << "<?xml version=\"1.0\"?>" << std::endl
      << "<VTKFile type=\"ImageData\" version=\"0.1\" byte_order=\"LittleEndian\">" << std::endl
      << " <ImageData WholeExtent=\"";

  for (int i=0; i<3; ++i)
    buf << begin_global[i] << " " << end_global[i] << " ";

  buf << "\"" << std::endl
      << " Origin=\"0 0 0\" Spacing=\"";

  for (int i=0; i<3; ++i)
    buf << grid.Extent().MeshWidth()[i] << " ";

  buf << "\">" << std::endl
      << " <Piece Extent=\"";

  for (int i=0; i<3; ++i)
    buf << begin_local[i] << " " << end_local[i] << " ";

  buf << "\">" << std::endl
      << " <PointData Scalars=\"" << information << "\">" << std::endl
      << " <DataArray type=\"Float32\" Name=\"" << information << "\" format=\"ascii\">" << std::endl
      << " ";

  char* char_buf = Helper::GetCharArray(buf.str());
  MPI_File_write(file, char_buf, buf.str().size(), MPI_CHAR, MPI_STATUS_IGNORE);
  delete [] char_buf;

  return file;
}

void CommMPI::FinalizeSerialOutputFile(MPI_File& file)
{
  std::stringstream buf;

  buf << std::endl
      << " </DataArray>" << std::endl
      << " </PointData>" << std::endl
      << " </Piece>" << std::endl
      << " </ImageData>" << std::endl
      << "</VTKFile>" << std::endl;

  char* char_buf = Helper::GetCharArray(buf.str());
  MPI_File_write(file, char_buf, buf.str().size(), MPI_CHAR, MPI_STATUS_IGNORE);
  delete [] char_buf;

  MPI_File_close(&file);
}
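
// Accessors for the Cartesian process topology: rank, size and coordinates
// with respect to the global communicator or to the level-local communicator
// of a given grid.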
int CommMPI::GlobalRank() const
{
  int rank;
  MPI_Comm_rank(comm_global, &rank);
  return rank;
}

int CommMPI::GlobalSize() const
{
  int size;
  MPI_Comm_size(comm_global, &size);
  return size;
}

Index CommMPI::GlobalPos() const
{
  Index dims, periods, coords;
  MPI_Cart_get(comm_global, 3, dims.vec(), periods.vec(), coords.vec());
  return coords;
}

Index CommMPI::GlobalProcs() const
{
  Index dims, periods, coords;
  MPI_Cart_get(comm_global, 3, dims.vec(), periods.vec(), coords.vec());
  return dims;
}

int CommMPI::Rank(const Grid& grid) const
{
  int rank;
  MPI_Comm comm = settings.CommunicatorLocal(grid);
  assert(comm != MPI_COMM_NULL);
  MPI_Comm_rank(comm, &rank);
  return rank;
}

int CommMPI::Size(const Grid& grid) const
{
  int size;
  MPI_Comm comm = settings.CommunicatorLocal(grid);
  assert(comm != MPI_COMM_NULL);
  MPI_Comm_size(comm, &size);
  return size;
}

Index CommMPI::Pos(const Grid& grid) const
{
  Index dims, periods, coords;
  MPI_Comm comm = settings.CommunicatorLocal(grid);
  assert(comm != MPI_COMM_NULL);
  MPI_Cart_get(comm, 3, dims.vec(), periods.vec(), coords.vec());
  return coords;
}

Index CommMPI::Procs(const Grid& grid) const
{
  Index dims, periods, coords;
  MPI_Comm comm = settings.CommunicatorLocal(grid);
  assert(comm != MPI_COMM_NULL);
  MPI_Cart_get(comm, 3, dims.vec(), periods.vec(), coords.vec());
  return dims;
}
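
// Set up the global communicator.  If the passed communicator already carries
// a Cartesian topology it is used as-is; otherwise a 3d Cartesian grid of
// processes is created, with dimensions chosen by hand for power-of-two
// process counts and by MPI_Dims_create otherwise.  Periodicity follows the
// boundary conditions.  In either case comm_global is released again in the
// destructor.  The "no_locks" info object is prepared here, presumably for
// the optional one-sided (VMG_ONE_SIDED) code path.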
void CommMPI::InitCommMPI(const MPI_Comm& comm)
{
  int status, size, rank;
  int dims[3] = {0, 0, 0};
  int periods[3];

  for (int i=0; i<3; ++i)
    periods[i] = (BoundaryConditions()[i] == Periodic ? 1 : 0);

  MPI_Comm_size(comm, &size);
  MPI_Comm_rank(comm, &rank);

  MPI_Topo_test(comm, &status);

  if (status == MPI_CART) {

    comm_global = comm;

  } else {

    const int log2 = Helper::log_2(size);

    if (Helper::intpow(2, log2) == size) {
      for (int i=0; i<3; ++i)
        dims[i] = Helper::intpow(2, log2 / 3 + (log2%3 > i ? 1 : 0));
    } else {
      MPI_Dims_create(size, 3, dims);
    }

#ifdef DEBUG_OUTPUT
    if (rank == 0)
      std::printf("Dims: %d %d %d\n", dims[0], dims[1], dims[2]);
#endif

    MPI_Cart_create(comm, 3, dims, periods, 1, &comm_global);

  }

  MPI_Info_create(&info);
  char key[] = "no_locks";
  char val[] = "true";
  MPI_Info_set(info, key, val);
}

CommMPI::~CommMPI()
{
  MPI_Comm_free(&comm_global);
#ifdef VMG_ONE_SIDED
  if (win_created)
    MPI_Win_free(&win);
#endif
  MPI_Info_free(&info);
}

Grid& CommMPI::GetCoarserGrid(Multigrid& multigrid)
{
  return settings.CoarserGrid(multigrid(multigrid.Level()));
}

Grid& CommMPI::GetFinerGrid(Multigrid& multigrid)
{
  return settings.FinerGrid(multigrid(multigrid.Level()));
}
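
// Create a time-stamped output directory (requires Boost.Filesystem).  Rank 0
// creates ./output/<timestamp>/, appending a numeric suffix until the name is
// unique, and broadcasts the resulting path to all other ranks.  Without
// Boost.Filesystem the current working directory is used instead.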
std::string CommMPI::CreateOutputDirectory()
{
#ifdef HAVE_BOOST_FILESYSTEM
  std::string path, unique_path;
  std::stringstream unique_suffix;
  int suffix_counter = 0;
  char buffer[129];
  time_t rawtime;
  struct tm *timeinfo;
  int path_size;
  char* path_array;

  if (GlobalRank() == 0) {

    time(&rawtime);
    timeinfo = localtime(&rawtime);
    strftime(buffer, 128, "./output/%Y_%m_%d_%H_%M_%S/", timeinfo);
    path = buffer;

    if (!fs::exists("output"))
      fs::create_directory("output");

    unique_path = path;

    while (fs::exists(unique_path.c_str())) {

      unique_suffix.str("");
      unique_suffix << "_" << suffix_counter++ << "/";

      unique_path = path;
      unique_path.replace(unique_path.size()-1, 1, unique_suffix.str());

    }

    fs::create_directory(unique_path.c_str());

    path_size = unique_path.size() + 1;
    path_array = Helper::GetCharArray(unique_path);

    MPI_Bcast(&path_size, 1, MPI_INT, 0, comm_global);

  } else {

    MPI_Bcast(&path_size, 1, MPI_INT, 0, comm_global);
    path_array = new char[path_size];

  }

  MPI_Bcast(path_array, path_size, MPI_CHAR, 0, comm_global);

  unique_path = path_array;

  delete [] path_array;

  return unique_path;

#else

  return "./";

#endif
}


#endif /* HAVE_MPI */