source: src/comm/comm_mpi.hpp @ d6a338

Last change on this file since d6a338 was 0bd47e, checked in by Julian Iseringhausen <isering@…>, 13 years ago

Added MPI classes output.

/*
 * vmg - a versatile multigrid solver
 * Copyright (C) 2012 Institute for Numerical Simulation, University of Bonn
 *
 * vmg is free software: you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation, either version 3 of the License, or
 * (at your option) any later version.
 *
 * vmg is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program. If not, see <http://www.gnu.org/licenses/>.
 */

/**
 * @file comm_mpi.hpp
 * @author Julian Iseringhausen <isering@ins.uni-bonn.de>
 * @date Wed Jun 16 13:21:06 2010
 *
 * @brief Class for MPI-based communication.
 *
 */

#ifndef COMM_MPI_HPP_
#define COMM_MPI_HPP_

#ifndef HAVE_MPI
#error You need MPI in order to compile CommMPI
#endif

#include <mpi.h> // MPI_Comm, MPI_Win, MPI_File are used directly below

#include <cstdarg>
#include <cstdio>
#include <list>
#include <map>
#include <sstream>
#include <string>
#include <vector>

#include "base/has_tempgrids.hpp"
#include "comm/comm.hpp"
#include "comm/mpi/settings.hpp"
#include "comm/mpi/has_request_vec.hpp"

namespace VMG
{

class DomainDecomposition;
class GridIteratorSet;
class TempGrid;

class CommMPI : public Comm, public HasTempGrids, public HasRequestVec
{
public:
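  /// Two constructors: the first takes an explicit MPI communicator, the
  /// second defaults to MPI_COMM_WORLD. Both delegate to InitCommMPI().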
  CommMPI(const Boundary& boundary, DomainDecomposition* domain_dec, const MPI_Comm& mpi_comm, bool register_ = true) :
    Comm(boundary, domain_dec, register_)
#ifdef VMG_ONE_SIDED
    ,win_created(false)
#endif
  {
    InitCommMPI(mpi_comm);
  }

  CommMPI(const Boundary& boundary, DomainDecomposition* domain_dec, bool register_ = true) :
    Comm(boundary, domain_dec, register_)
#ifdef VMG_ONE_SIDED
    ,win_created(false)
#endif
  {
    InitCommMPI(MPI_COMM_WORLD);
  }

  virtual ~CommMPI();

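  /// Access the next coarser/finer level of the multigrid hierarchy
  /// (presumably backed by the TempGrid/HasTempGrids machinery when a
  /// level has to be gathered from other processes).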
  Grid& GetCoarserGrid(Multigrid& multigrid);
  Grid& GetFinerGrid(Multigrid& multigrid);

  void CommFromGhosts(Grid& grid);
  void CommToGhosts(Grid& grid);
  void CommSubgrid(Grid& grid_old, Grid& grid_new, const int& direction);
  void CommAddSubgrid(Grid& grid_old, Grid& grid_new, const int& direction);

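  /// Split-phase variants of the ghost exchanges above: Start posts the
  /// non-blocking transfers, Finish waits for their completion, so
  /// computation can be overlapped with communication in between.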
  void CommToGhostsAsyncStart(Grid& grid);
  void CommToGhostsAsyncFinish(Grid& grid);
  void CommFromGhostsAsyncStart(Grid& grid);
  void CommFromGhostsAsyncFinish(Grid& grid);

  void Barrier();

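  /// Reductions, broadcasts and gathers over the global communicator.
  /// The *Root variants presumably deliver the result on the root rank
  /// only (MPI_Reduce as opposed to MPI_Allreduce).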
  vmg_float GlobalSum(vmg_float value);
  vmg_float GlobalSumRoot(vmg_float value);
  void GlobalSumArray(vmg_float* array, const vmg_int& size);
  vmg_float GlobalMax(vmg_float value);
  vmg_float GlobalMaxRoot(vmg_float value);
  void GlobalMaxArray(vmg_float* array, const vmg_int& size);
  void GlobalBroadcast(vmg_float& value);
  void GlobalGather(vmg_float& value, vmg_float* array);

  vmg_int GlobalSum(vmg_int value);
  vmg_int GlobalSumRoot(vmg_int value);
  void GlobalSumArray(vmg_int* array, const vmg_int& size);
  vmg_int GlobalMax(vmg_int value);
  vmg_int GlobalMaxRoot(vmg_int value);
  void GlobalMaxArray(vmg_int* array, const vmg_int& size);
  void GlobalBroadcast(vmg_int& value);
  void GlobalGather(vmg_int& value, vmg_int* array);

  void GlobalBroadcast(char* str);

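  /// Reductions restricted to the processes holding a part of the given
  /// grid level, as opposed to the Global* family above.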
  vmg_float LevelSum(const Grid& grid, vmg_float value);
  vmg_float LevelSumRoot(const Grid& grid, vmg_float value);
  void LevelSumArray(const Grid& grid, vmg_float* array, const vmg_int& size);

  vmg_int LevelSum(const Grid& grid, vmg_int value);
  vmg_int LevelSumRoot(const Grid& grid, vmg_int value);
  void LevelSumArray(const Grid& grid, vmg_int* array, const vmg_int& size);

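  /// printf-style logging, filtered by OutputLevel, plus XML and grid
  /// output routines.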
  void Print(const OutputLevel level, const char* format, ...);
  void PrintOnce(const OutputLevel level, const char* format, ...);

  void PrintXML(const std::string& filename, const std::string& xml_data);
  void PrintXMLAll(const std::string& filename, const std::string& xml_data);
  void PrintAllSettings();
  void PrintGrid(Grid& grid, const char* information);
  void PrintDefect(Grid& sol, Grid& rhs, const char* information);

  virtual int GlobalRank() const;
  virtual int GlobalSize() const;
  virtual Index GlobalPos() const;
  virtual Index GlobalProcs() const;

  virtual int Rank(const Grid& grid) const;
  virtual int Size(const Grid& grid) const;
  virtual Index Pos(const Grid& grid) const;
  virtual Index Procs(const Grid& grid) const;

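  /// Late initialization: once both hierarchies are fully set up, let the
  /// Settings object derive its MPI metadata (datatypes, communicators)
  /// from them.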
  void PostInit(Multigrid& sol, Multigrid& rhs)
  {
    settings.ComputeSettings(sol, rhs, comm_global);
  }

  virtual void DebugPrintError(const Grid& sol, const char* information) {}
  virtual void DebugPrintErrorNorm(Grid& sol) {}
  virtual void DebugPrintGridStructure(Multigrid& multigrid) {}

private:
  void InitCommMPI(const MPI_Comm& comm);

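  /// Helpers for writing grid data to disk, either serially or
  /// collectively via MPI-IO (MPI_File); used by the Print* routines.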
  void CreateOutputFiles(const Grid& grid, const std::stringstream& serial_data, const char* information,
                         const Index& begin_global, const Index& end_global,
                         const Index& begin_local, const Index& end_local,
                         const int& output_count);

  void CreateParallelOutputFile(const Grid& grid, MPI_Comm& comm,
                                const int& output_count, const char* information,
                                const Index& begin_global, const Index& end_global,
                                const Index& begin_local, const Index& end_local);

  MPI_File CreateSerialOutputFile(const Grid& grid, MPI_Comm& comm,
                                  const int& output_count, const char* information,
                                  const Index& begin_global, const Index& end_global,
                                  const Index& begin_local, const Index& end_local);

  void FinalizeSerialOutputFile(MPI_File& file);

  std::string CreateOutputDirectory();

protected:
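  /// Non-blocking point-to-point helpers driven by the per-neighbor MPI
  /// datatypes. The *Buffered variants stage the data through intermediate
  /// buffers, which ReplaceBufferAll/AddBufferAll then copy or accumulate
  /// back into the grid.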
  void IsendAll(Grid& grid, std::vector<VMG::MPI::Datatype>& types, const MPI_Comm& comm, const int& tag_start);
  void IrecvAll(Grid& grid, std::vector<VMG::MPI::Datatype>& types, const MPI_Comm& comm, const int& tag_start);

  void IsendAllBuffered(const Grid& grid, std::vector<VMG::MPI::Datatype>& types, const MPI_Comm& comm, const int& tag_start);
  void IrecvAllBuffered(std::vector<VMG::MPI::Datatype>& types, const MPI_Comm& comm, const int& tag_start);

  void ReplaceBufferAll(Grid& grid, const std::vector<VMG::MPI::Datatype>& types);
  void AddBufferAll(Grid& grid, const std::vector<VMG::MPI::Datatype>& types);

  void PrintGridInformation(const Grid& grid, char* filename, const std::string& name);
  void PrintDatatypes(char* filename);

  VMG::MPI::Settings settings;

  MPI_Comm comm_global;
  MPI_Info info;

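  // State for the optional one-sided (RMA) communication path: win is the
  // MPI window, win_created records whether it has been created yet.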
#ifdef VMG_ONE_SIDED
  bool win_created;
  MPI_Win win;
#endif

};
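
/* Usage sketch (illustrative only, not part of the original header; the
 * boundary/domain_dec objects and the OutputLevel value are placeholders
 * from the surrounding library):
 *
 *   CommMPI comm(boundary, domain_dec);       // communicates over MPI_COMM_WORLD
 *   vmg_float local = ...;                    // locally computed contribution
 *   vmg_float total = comm.GlobalSum(local);  // same result on every rank
 *   comm.PrintOnce(level, "sum: %e", total);  // printed by a single rank
 */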
}

#endif /* COMM_MPI_HPP_ */