source: src/comm/comm_mpi.hpp@177495

Last change on this file since 177495 was 177495, checked in by Julian Iseringhausen <isering@…>, 13 years ago

Implemented boundary values derived from continuous problem.

  • Property mode set to 100644
File size: 6.4 KB
/*
 * vmg - a versatile multigrid solver
 * Copyright (C) 2012 Institute for Numerical Simulation, University of Bonn
 *
 * vmg is free software: you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation, either version 3 of the License, or
 * (at your option) any later version.
 *
 * vmg is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program. If not, see <http://www.gnu.org/licenses/>.
 */

/**
 * @file comm_mpi.hpp
 * @author Julian Iseringhausen <isering@ins.uni-bonn.de>
 * @date Wed Jun 16 13:21:06 2010
 *
 * @brief Class for MPI-based communication.
 *
 */

#ifndef COMM_MPI_HPP_
#define COMM_MPI_HPP_

#ifndef HAVE_MPI
#error You need MPI in order to compile CommMPI
#endif

#include <cstdarg>
#include <cstdio>
#include <list>
#include <map>
#include <vector>

#include "base/has_tempgrids.hpp"
#include "comm/comm.hpp"
#include "comm/mpi/settings.hpp"
#include "comm/mpi/has_request_vec.hpp"

namespace VMG
{

class DomainDecomposition;
class GridIteratorSet;
class TempGrid;

class CommMPI : public Comm, public HasTempGrids, public HasRequestVec
{
public:
  CommMPI(const Boundary& boundary, DomainDecomposition* domain_dec, const MPI_Comm& mpi_comm, bool register_ = true) :
    Comm(boundary, domain_dec, register_)
#ifdef VMG_ONE_SIDED
    ,win_created(false)
#endif
  {
    InitCommMPI(mpi_comm);
  }

  CommMPI(const Boundary& boundary, DomainDecomposition* domain_dec, bool register_ = true) :
    Comm(boundary, domain_dec, register_)
#ifdef VMG_ONE_SIDED
    ,win_created(false)
#endif
  {
    InitCommMPI(MPI_COMM_WORLD);
  }

  virtual ~CommMPI();

  Grid& GetCoarserGrid(Multigrid& multigrid);
  Grid& GetFinerGrid(Multigrid& multigrid);

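  // Ghost (halo) exchange and transfer of grid data between different
  // process distributions.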
  void CommFromGhosts(Grid& grid);
  void CommToGhosts(Grid& grid);
  void CommSubgrid(Grid& grid_old, Grid& grid_new, const int& direction);
  void CommAddSubgrid(Grid& grid_old, Grid& grid_new, const int& direction);
  std::vector<BoundaryValue> CommBoundaryValues();

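  // Non-blocking variants of the ghost exchange, split into start/finish
  // phases so that computation can overlap with communication.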
  void CommToGhostsAsyncStart(Grid& grid);
  void CommToGhostsAsyncFinish(Grid& grid);
  void CommFromGhostsAsyncStart(Grid& grid);
  void CommFromGhostsAsyncFinish(Grid& grid);

  void Barrier();

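  // Collective reductions, broadcasts and gathers over all processes
  // (floating-point and integer variants).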
  vmg_float GlobalSum(vmg_float value);
  vmg_float GlobalSumRoot(vmg_float value);
  void GlobalSumArray(vmg_float* array, const vmg_int& size);
  vmg_float GlobalMax(vmg_float value);
  vmg_float GlobalMaxRoot(vmg_float value);
  void GlobalMaxArray(vmg_float* array, const vmg_int& size);
  void GlobalBroadcast(vmg_float& value);
  void GlobalGather(vmg_float& value, vmg_float* array);

  vmg_int GlobalSum(vmg_int value);
  vmg_int GlobalSumRoot(vmg_int value);
  void GlobalSumArray(vmg_int* array, const vmg_int& size);
  vmg_int GlobalMax(vmg_int value);
  vmg_int GlobalMaxRoot(vmg_int value);
  void GlobalMaxArray(vmg_int* array, const vmg_int& size);
  void GlobalBroadcast(vmg_int& value);
  void GlobalGather(vmg_int& value, vmg_int* array);

  void GlobalBroadcast(char* str);

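  // Reductions restricted to the processes that hold a part of the given grid.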
  vmg_float LevelSum(const Grid& grid, vmg_float value);
  vmg_float LevelSumRoot(const Grid& grid, vmg_float value);
  void LevelSumArray(const Grid& grid, vmg_float* array, const vmg_int& size);

  vmg_int LevelSum(const Grid& grid, vmg_int value);
  vmg_int LevelSumRoot(const Grid& grid, vmg_int value);
  void LevelSumArray(const Grid& grid, vmg_int* array, const vmg_int& size);

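  // printf-style output; Print writes on every process, PrintOnce on one only.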
  void Print(const OutputLevel level, const char* format, ...);
  void PrintOnce(const OutputLevel level, const char* format, ...);

  void PrintXML(const std::string& filename, const std::string& xml_data);
  void PrintXMLAll(const std::string& filename, const std::string& xml_data);
  void PrintAllSettings();
  void PrintGrid(Grid& grid, const char* information);
  void PrintDefect(Grid& sol, Grid& rhs, const char* information);

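  // Rank, size and process-grid position queries, both for the global
  // communicator and for the communicator associated with a particular grid.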
  virtual int GlobalRank() const;
  virtual int GlobalSize() const;
  virtual Index GlobalPos() const;
  virtual Index GlobalProcs() const;

  virtual int Rank(const Grid& grid) const;
  virtual int Size(const Grid& grid) const;
  virtual Index Pos(const Grid& grid) const;
  virtual Index Procs(const Grid& grid) const;

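  // Called once the multigrid hierarchies are set up; precomputes the MPI
  // communication settings used by this class.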
  void PostInit(Multigrid& sol, Multigrid& rhs)
  {
    settings.ComputeSettings(sol, rhs, comm_global);
  }

  virtual void DebugPrintError(const Grid& sol, const char* information) {}
  virtual void DebugPrintErrorNorm(Grid& sol) {}
  virtual void DebugPrintGridStructure(Multigrid& multigrid) {}

private:
  void InitCommMPI(const MPI_Comm& comm);

  void CreateOutputFiles(const Grid& grid, const std::stringstream& serial_data, const char* information,
                         const Index& begin_global, const Index& end_global,
                         const Index& begin_local, const Index& end_local,
                         const int& output_count);

  void CreateParallelOutputFile(const Grid& grid, MPI_Comm& comm,
                                const int& output_count, const char* information,
                                const Index& begin_global, const Index& end_global,
                                const Index& begin_local, const Index& end_local);

  MPI_File CreateSerialOutputFile(const Grid& grid, MPI_Comm& comm,
                                  const int& output_count, const char* information,
                                  const Index& begin_global, const Index& end_global,
                                  const Index& begin_local, const Index& end_local);

  void FinalizeSerialOutputFile(MPI_File& file);

  std::string CreateOutputDirectory();

protected:
  void IsendAll(Grid& grid, std::vector<VMG::MPI::Datatype>& types, const MPI_Comm& comm, const int& tag_start);
  void IrecvAll(Grid& grid, std::vector<VMG::MPI::Datatype>& types, const MPI_Comm& comm, const int& tag_start);

  void IsendAllBuffered(const Grid& grid, std::vector<VMG::MPI::Datatype>& types, const MPI_Comm& comm, const int& tag_start);
  void IrecvAllBuffered(std::vector<VMG::MPI::Datatype>& types, const MPI_Comm& comm, const int& tag_start);

  void ReplaceBufferAll(Grid& grid, const std::vector<VMG::MPI::Datatype>& types);
  void AddBufferAll(Grid& grid, const std::vector<VMG::MPI::Datatype>& types);

  void PrintGridInformation(const Grid& grid, char* filename, const std::string& name);
  void PrintDatatypes(char* filename);

  VMG::MPI::Settings settings;

  MPI_Comm comm_global;
  MPI_Info info;

#ifdef VMG_ONE_SIDED
  bool win_created;
  MPI_Win win;
#endif

};

}

#endif /* COMM_MPI_HPP_ */
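
A minimal usage sketch of the interface declared above, assuming MPI has already been initialized and that a Boundary object and a DomainDecomposition instance are provided by the surrounding application; RunCommExample and its arguments are hypothetical names, not part of the vmg API:

#include <mpi.h>

#include "comm/comm_mpi.hpp"

using namespace VMG;

// Hypothetical driver: boundary and domain_dec are assumed to be
// created and owned elsewhere in the application.
void RunCommExample(const Boundary& boundary, DomainDecomposition* domain_dec)
{
  // This constructor (without an explicit communicator) uses MPI_COMM_WORLD.
  CommMPI comm(boundary, domain_dec);

  // Reduce a per-process contribution over all ranks.
  vmg_float local_value = 1.0;
  vmg_float global_sum = comm.GlobalSum(local_value);
  (void)global_sum;

  // Synchronize all processes before continuing.
  comm.Barrier();
}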