Changeset 2112b1


Timestamp:
Dec 8, 2011, 12:53:53 PM
Author:
Julian Iseringhausen <isering@…>
Children:
1610dc
Parents:
138f86
Message:

Did some work on the MPI communication.

git-svn-id: https://svn.version.fz-juelich.de/scafacos/trunk@1245 5161e1c8-67bf-11de-9fd5-51895aff932f

Location:
src
Files:
2 added
4 deleted
5 edited

Legend:

Unchanged lines are shown without a marker, added lines are prefixed with +, removed lines with -; … marks omitted unchanged context.
  • src/Makefile.am

    r138f86 r2112b1

              comm/mpi/comm_info.cpp \
              comm/mpi/comm_info.hpp \
    -         comm/mpi/comm_key.cpp \
    -         comm/mpi/comm_key.hpp \
    -         comm/mpi/datatype.hpp \
              comm/mpi/error_handler.cpp \
              comm/mpi/error_handler.hpp \
              comm/mpi/has_request_vec.hpp \
    +         comm/mpi/key.cpp \
    +         comm/mpi/key.hpp \
              commands/com_check_consistency.cpp \
              commands/com_check_iteration_counter.cpp \
  • src/comm/comm_mpi.cpp

    r138f86 r2112b1

      #include "base/timer.hpp"
      #include "comm/comm_mpi.hpp"
    - #include "comm/mpi/datatype.hpp"
      #include "grid/grid.hpp"
      #include "grid/multigrid.hpp"
    …
            if (has_halo_1[i]) {
    -
    -         MPI_Datatype dts1 = GetDatatype(grid, grid.Iterators().Halo1()[i]);
    +         MPI_Datatype dts1 = comm_info.GetDatatypeSubarray(grid, grid.Iterators().Halo1()[i]);
              MPI_Isend(&grid(0), 1, dts1, neighbors.Left(),  1, comm, &Request());
    -         MPI_Type_free(&dts1);
            }

            if (has_halo_2[i]) {

    -         MPI_Datatype dts2 = GetDatatype(grid, grid.Iterators().Halo2()[i]);
    +         MPI_Datatype dts2 = comm_info.GetDatatypeSubarray(grid, grid.Iterators().Halo2()[i]);
              MPI_Isend(&grid(0), 1, dts2, neighbors.Right(), 2, comm, &Request());
    -         MPI_Type_free(&dts2);
    -
    -         MPI_Datatype dtr1 = GetDatatype(grid, grid.Iterators().NearBoundary2()[i]);
    +
    +         MPI_Datatype dtr1 = comm_info.GetDatatypeSubarray(grid, grid.Iterators().NearBoundary2()[i]);
              ReceiveAndAddSubgrid(grid, comm, dtr1, neighbors.Right(), 1);
    -         MPI_Type_free(&dtr1);
    +
            }

            if (has_halo_1[i]) {
    -
    -         MPI_Datatype dtr2 = GetDatatype(grid, grid.Iterators().NearBoundary1()[i]);
    +         MPI_Datatype dtr2 = comm_info.GetDatatypeSubarray(grid, grid.Iterators().NearBoundary1()[i]);
              ReceiveAndAddSubgrid(grid, comm, dtr2, neighbors.Left(), 2);
    -         MPI_Type_free(&dtr2);
    -
            }

    …
            if (has_halo_1[i]) {
    -         dts_left = GetDatatype(grid, grid.Iterators().NearBoundary1()[i]);
    -         dtr_left = GetDatatype(grid, grid.Iterators().Halo1()[i]);
    +         dts_left = comm_info.GetDatatypeSubarray(grid, grid.Iterators().NearBoundary1()[i]);
    +         dtr_left = comm_info.GetDatatypeSubarray(grid, grid.Iterators().Halo1()[i]);
              num_left = 1;
            }else {
    …
            if (has_halo_2[i]) {
    -         dts_right = GetDatatype(grid, grid.Iterators().NearBoundary2()[i]);
    -         dtr_right = GetDatatype(grid, grid.Iterators().Halo2()[i]);
    +         dts_right = comm_info.GetDatatypeSubarray(grid, grid.Iterators().NearBoundary2()[i]);
    +         dtr_right = comm_info.GetDatatypeSubarray(grid, grid.Iterators().Halo2()[i]);
              num_right = 1;
            }else {
    …
                         comm, MPI_STATUS_IGNORE);
            Timer::Stop("MPI_Sendrecv");
    -
    -       if (has_halo_1[i]) {
    -         MPI_Type_free(&dts_left);
    -         MPI_Type_free(&dtr_left);
    -       }
    -
    -       if (has_halo_2[i]) {
    -         MPI_Type_free(&dts_right);
    -         MPI_Type_free(&dtr_right);
    -       }

          }
    …
      }

    - MPI_Datatype CommMPI::GetDatatype(const Index& start, const Index& end, const Index& grid_size)
    - {
    -   MPI_Datatype type;
    -   Index sizes = grid_size;
    -   Index subsizes = end - start;
    -   Index starts = start;
    -
    -   MPI_Type_create_subarray(3, sizes.vec(), subsizes.vec(), starts.vec(), MPI_ORDER_C, MPI_DOUBLE, &type);
    -
    -   MPI_Type_commit(&type);
    -
    -   return type;
    - }
    -
    - MPI_Datatype CommMPI::GetDatatype(const Grid& grid, const GridIteratorSet& bounds)
    - {
    -   return GetDatatype(bounds.Begin().GetBegin(), bounds.Begin().GetEnd(), grid.Local().SizeTotal());
    - }
    -
      void CommMPI::CreateOutputFiles(const Grid& grid, const std::stringstream& serial_data, const char* information,
                                      const Index& begin_global, const Index& end_global,
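
The change above replaces the per-call GetDatatype/MPI_Type_free pairs with datatypes obtained from comm_info, which now owns and reuses them. A minimal sketch of the resulting halo-exchange pattern, assuming the subarray types are already committed and cached elsewhere (function and parameter names here are illustrative, not CommMPI's actual interface):

    #include <mpi.h>

    // Sketch only: exchange halo layers with a left and a right neighbor using
    // pre-committed subarray datatypes. Because the datatypes are cached and
    // freed centrally (e.g. in a destructor), no MPI_Type_free appears here.
    // Tags follow the changeset: 1 = data travelling left, 2 = data travelling right.
    void ExchangeHaloSketch(double* grid, MPI_Comm comm, int left, int right,
                            MPI_Datatype send_left, MPI_Datatype send_right,
                            MPI_Datatype recv_left, MPI_Datatype recv_right)
    {
      MPI_Request req[4];

      // Receive into the halo regions described by the cached datatypes.
      MPI_Irecv(grid, 1, recv_left,  left,  2, comm, &req[0]);
      MPI_Irecv(grid, 1, recv_right, right, 1, comm, &req[1]);

      // Send the near-boundary regions to both neighbors.
      MPI_Isend(grid, 1, send_left,  left,  1, comm, &req[2]);
      MPI_Isend(grid, 1, send_right, right, 2, comm, &req[3]);

      MPI_Waitall(4, req, MPI_STATUSES_IGNORE);
    }
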
  • src/comm/comm_mpi.hpp

    r138f86 r2112b1

      #include "comm/comm.hpp"
      #include "comm/mpi/comm_info.hpp"
    - #include "comm/mpi/datatype.hpp"
      #include "comm/mpi/has_request_vec.hpp"
    …
      {

    - class DatatypeInfo;
      class DomainDecomposition;
      class GridIteratorSet;
    …
        VMG::MPI::CommInfo comm_info;

    -   std::vector<MPI_Request> request_vec;
    -
        MPI_Comm comm_global;
        bool win_created;
    …
        void ReceiveAndAddSubgrid(Grid& grid, const MPI_Comm& comm, MPI_Datatype& type,
                                  const int& rank, const int& tag);
    -
    -   MPI_Datatype GetDatatype(const Index& start, const Index& end, const Index& grid_size);
    -   MPI_Datatype GetDatatype(const Grid& grid, const GridIteratorSet& bounds);
      };

  • src/comm/mpi/comm_info.cpp

    r138f86 r2112b1

      #include "base/index.hpp"
      #include "comm/mpi/comm_info.hpp"
    - #include "comm/mpi/datatype.hpp"
      #include "grid/grid.hpp"
      #include "grid/multigrid.hpp"
    …
      VMG::MPI::CommInfo::~CommInfo()
      {
    -   std::map<CommKey, MPI_Comm>::iterator iter;
    -
    -   for (iter=communicators.begin(); iter!=communicators.end(); ++iter)
    -     if (iter->second != MPI_COMM_NULL)
    -       MPI_Comm_free(&(iter->second));
    +   std::map<Key, MPI_Comm>::iterator c_iter;
    +   for (c_iter=communicators.begin(); c_iter!=communicators.end(); ++c_iter)
    +     if (c_iter->second != MPI_COMM_NULL)
    +       MPI_Comm_free(&c_iter->second);
    +
    +   std::map<Key, MPI_Datatype>::iterator d_iter;
    +   for (d_iter=datatypes.begin(); d_iter!=datatypes.end(); ++d_iter)
    +     MPI_Type_free(&d_iter->second);
      }

      MPI_Comm VMG::MPI::CommInfo::GetCommunicator(const Grid& grid)
      {
    -   return GetCommunicator(grid.Global(), reinterpret_cast<const void*>(&grid));
    - }
    -
    - MPI_Comm VMG::MPI::CommInfo::GetCommunicator(const GlobalIndices& global, const void* addr)
    - {
    -   std::map<CommKey, MPI_Comm>::iterator iter = communicators.find(CommKey(global, addr));
    +   std::map<Key, MPI_Comm>::iterator iter = communicators.find(Key(grid));

        if (iter != communicators.end())
    …
        MPI_Comm_rank(comm_global, &rank);

    -   if (global.SizeLocal().Product() == 0) {
    +   if (grid.Global().SizeLocal().Product() == 0) {

          MPI_Comm_split(comm_global, MPI_UNDEFINED, rank, &comm);

    -     communicators.insert(std::make_pair(CommKey(global, addr), comm));
    +     communicators.insert(std::make_pair(Key(grid), comm));

        }else {
    …
          MPI_Comm_free(&comm_temp);

    -     communicators.insert(std::make_pair(CommKey(global, addr), comm));
    +     communicators.insert(std::make_pair(Key(grid), comm));

        }
    …
      MPI_Comm VMG::MPI::CommInfo::GetUnionCommunicator(const Grid& grid_1, const Grid& grid_2)
      {
    -   std::map<CommKey, MPI_Comm>::iterator iter = communicators.find(CommKey(grid_1, grid_2));
    +   std::map<Key, MPI_Comm>::iterator iter = communicators.find(Key(grid_1, grid_2));

        if (iter != communicators.end())
    …
          MPI_Comm_split(comm_global, MPI_UNDEFINED, rank, &comm);

    -     communicators.insert(std::make_pair(CommKey(grid_1, grid_2), comm));
    +     communicators.insert(std::make_pair(Key(grid_1, grid_2), comm));

        }else {
    …
          MPI_Comm_free(&comm_temp);

    -     communicators.insert(std::make_pair(CommKey(grid_1, grid_2), comm));
    +     communicators.insert(std::make_pair(Key(grid_1, grid_2), comm));

        }
    …
        return comm;
      }
    -
    - VMG::MPI::Datatypes& VMG::MPI::CommInfo::GetDatatypes(const Grid& grid)
    - {
    -   std::map<const Grid*, Datatypes>::iterator iter = datatypes.find(&grid);
    -
    -   if (iter == datatypes.end()) {
    -
    -     iter = datatypes.insert(std::make_pair(&grid, Datatypes())).first;
    -
    -     int rank, size;
    -     Index sizes, subsizes, starts;
    -
    -     MPI_Comm comm = GetCommunicator(grid.Level());
    -
    -     if (comm != MPI_COMM_NULL) {
    -
    -       MPI_Comm_rank(comm, &rank);
    -       MPI_Comm_size(comm, &size);
    -
    -       int* buffer = new int[6*size];
    -
    -       for (int i=0; i<3; ++i) {
    -
    -         buffer[6*rank + i  ] = grid.Global().BeginLocal()[i];
    -         buffer[6*rank + i+3] = grid.Global().SizeLocal()[i];
    -
    -       }
    -
    -       MPI_Allgather(MPI_IN_PLACE, 6, MPI_INT, buffer, 6, MPI_INT, comm);
    -
    -       sizes = grid.Global().SizeGlobal();
    -
    -       for (int i=0; i<size; ++i) {
    -
    -         for (int j=0; j<3; ++j) {
    -
    -           starts[j] = buffer[6*i + j];
    -           subsizes[j] = buffer[6*i + j+3];
    -
    -         }
    -
    -         Datatype dt(starts, starts+subsizes, sizes, comm);
    -
    -         iter->second.push_back(dt);
    -
    -       }
    -
    -       delete [] buffer;
    -
    -     }
    -
    -   }
    -
    -   return iter->second;
    + MPI_Datatype VMG::MPI::CommInfo::GetDatatypeSubarray(const Grid& grid, const GridIteratorSet& bounds)
    + {
    +   return GetDatatypeSubarray(bounds.Begin().GetBegin(), bounds.Begin().GetEnd(), grid.Local().SizeTotal());
    + }
    +
    +
    + MPI_Datatype VMG::MPI::CommInfo::GetDatatypeSubarray(const Index& begin, const Index& end, const Index& size_total)
    + {
    +   std::map<Key, MPI_Datatype>::iterator iter = datatypes.find(Key(begin, end, size_total));
    +
    +   if (iter != datatypes.end())
    +     return iter->second;
    +
    +   MPI_Datatype dt;
    +   Index sizes = size_total;
    +   Index subsizes = end - begin;
    +   Index starts = begin;
    +
    +   MPI_Type_create_subarray(3, sizes.vec(), subsizes.vec(), starts.vec(), MPI_ORDER_C, MPI_DOUBLE, &dt);
    +
    +   MPI_Type_commit(&dt);
    +
    +   return dt;
      }

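The new GetDatatypeSubarray looks a type up in a Key-indexed map and otherwise builds it with MPI_Type_create_subarray; the destructor above frees whatever ends up in that map. A minimal sketch of such a subarray-datatype cache, using a plain integer key instead of the changeset's Key class and also storing the freshly created type back into the map (SubarrayCache and its members are illustrative names, not the changeset's CommInfo interface):

    #include <mpi.h>
    #include <map>
    #include <vector>

    // Sketch of a cache for committed 3d subarray datatypes,
    // keyed by (begin, end, size_total).
    class SubarrayCache
    {
    public:
      ~SubarrayCache()
      {
        // Committed types are freed once here instead of after every send.
        for (std::map<std::vector<int>, MPI_Datatype>::iterator it = cache.begin();
             it != cache.end(); ++it)
          MPI_Type_free(&it->second);
      }

      // Return a committed subarray datatype for the block [begin, end)
      // inside a local grid of extent size_total, creating it only once.
      MPI_Datatype Get(const int begin[3], const int end[3], const int size_total[3])
      {
        std::vector<int> key(begin, begin + 3);
        key.insert(key.end(), end, end + 3);
        key.insert(key.end(), size_total, size_total + 3);

        std::map<std::vector<int>, MPI_Datatype>::iterator it = cache.find(key);
        if (it != cache.end())
          return it->second;

        int sizes[3], subsizes[3], starts[3];
        for (int i = 0; i < 3; ++i) {
          sizes[i] = size_total[i];
          subsizes[i] = end[i] - begin[i];
          starts[i] = begin[i];
        }

        MPI_Datatype dt;
        MPI_Type_create_subarray(3, sizes, subsizes, starts,
                                 MPI_ORDER_C, MPI_DOUBLE, &dt);
        MPI_Type_commit(&dt);

        cache.insert(std::make_pair(key, dt));
        return dt;
      }

    private:
      std::map<std::vector<int>, MPI_Datatype> cache;
    };

Keying on (begin, end, size_total) rather than on a grid pointer means two grids with the same local layout share one committed type, and the types survive across iterations instead of being rebuilt and freed around every communication call.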
  • src/comm/mpi/comm_info.hpp

    r138f86 r2112b1

      #include "base/tuple.hpp"
    - #include "comm/mpi/comm_key.hpp"
    - #include "comm/mpi/datatype.hpp"
    + #include "comm/mpi/key.hpp"

      namespace VMG
    …
      class Index;
      class Grid;
    - class Multigrid;
    + class GridIteratorSet;

      namespace MPI
    …

        MPI_Comm GetCommunicator(const Grid& grid);
    -   MPI_Comm GetCommunicator(const GlobalIndices& global, const void* addr);
        MPI_Comm GetUnionCommunicator(const Grid& grid_1, const Grid& grid_2);
    -   Datatypes& GetDatatypes(const Grid& grid);
    +
    +   MPI_Datatype GetDatatypeSubarray(const Grid& grid, const GridIteratorSet& bounds);
    +   MPI_Datatype GetDatatypeSubarray(const Index& begin, const Index& end, const Index& size_total);

        Index Pos(const Grid& grid);
    …

      private:
    -   std::map<const Grid*, VMG::MPI::Datatypes> datatypes;
    -   std::map<CommKey, MPI_Comm> communicators;
    +   std::map<Key, MPI_Comm> communicators;
    +   std::map<Key, MPI_Datatype> datatypes;

        MPI_Comm comm_global;
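
The communicators map keyed by Key follows the same idea as the datatype cache: a communicator per grid (or grid pair) is created once with MPI_Comm_split, ranks that hold no part of the grid pass MPI_UNDEFINED and receive MPI_COMM_NULL, and the destructor frees the non-null entries. A rough sketch of that pattern, with a simplified string key in place of the Key class and without the Cartesian-communicator branch the diff elides (CommCache and its members are illustrative):

    #include <mpi.h>
    #include <map>
    #include <string>

    // Sketch of a communicator cache. Get() is collective: all ranks of
    // comm_global must call it together with the same key.
    class CommCache
    {
    public:
      explicit CommCache(MPI_Comm global) : comm_global(global) {}

      ~CommCache()
      {
        // Free every cached communicator once; MPI_COMM_NULL entries mark
        // ranks that do not participate in that grid.
        for (std::map<std::string, MPI_Comm>::iterator it = comms.begin();
             it != comms.end(); ++it)
          if (it->second != MPI_COMM_NULL)
            MPI_Comm_free(&it->second);
      }

      // owns_part: whether this rank holds a nonempty piece of the grid.
      MPI_Comm Get(const std::string& key, bool owns_part)
      {
        std::map<std::string, MPI_Comm>::iterator it = comms.find(key);
        if (it != comms.end())
          return it->second;

        int rank;
        MPI_Comm_rank(comm_global, &rank);

        // Ranks passing MPI_UNDEFINED are left out and get MPI_COMM_NULL.
        MPI_Comm comm;
        MPI_Comm_split(comm_global, owns_part ? 1 : MPI_UNDEFINED, rank, &comm);

        comms.insert(std::make_pair(key, comm));
        return comm;
      }

    private:
      MPI_Comm comm_global;
      std::map<std::string, MPI_Comm> comms;
    };
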