Changeset 177495 for src/comm


Timestamp: Apr 17, 2013, 6:35:40 PM (13 years ago)
Author: Julian Iseringhausen <isering@…>
Children: 4f3474
Parents: d6a338
Message: Implemented boundary values derived from the continuous problem.

Location: src/comm
Files: 7 edited

  • src/comm/comm.hpp

    rd6a338 → r177495

    @@ -30,5 +30,5 @@
     
     #include <map>
    -#include <list>
    +#include <vector>
     
     #include "base/defs.hpp"

    @@ -41,4 +41,5 @@
     {
     
    +class BoundaryValue;
     class DomainDecomposition;
     class GlobalIndices;

    @@ -70,4 +71,5 @@
       virtual void CommSubgrid(Grid& grid_old, Grid& grid_new, const int& direction) = 0;
       virtual void CommAddSubgrid(Grid& grid_old, Grid& grid_new, const int& direction) = 0;
    +  virtual std::vector<BoundaryValue> CommBoundaryValues() = 0;
     
       virtual void CommToGhostsAsyncStart(Grid& grid) = 0;

    @@ -139,5 +141,4 @@
       const std::map<Index, std::vector<GlobalIndices> >& DecomposedGlobal() const {return decomposed_global;}
     
    -
     protected:
       const std::string& OutputPath();
  • src/comm/comm_mpi.cpp

    rd6a338 → r177495

    @@ -58,4 +58,5 @@
     #include "comm/comm_mpi.hpp"
     #include "comm/mpi/datatypes_local.hpp"
    +#include "discretization/boundary_value_setter.hpp"
     #include "grid/grid.hpp"
     #include "grid/multigrid.hpp"

    @@ -126,4 +127,44 @@
         AddBufferAll(grid_new, datatypes.Receive());
       }
    +}
    +
    +std::vector<BoundaryValue> CommMPI::CommBoundaryValues()
    +{
    +  MPI_Comm comm = settings.CommunicatorLocal((*MG::GetRhs())(MG::GetRhs()->MaxLevel()));
    +  if (comm != MPI_COMM_NULL) {
    +
    +    const std::vector<BoundaryValue>& bvs = MG::GetBoundaryValueSetter()->BoundaryValues();
    +
    +    std::vector<vmg_float> val_buffer; val_buffer.reserve(bvs.size());
    +
    +    int comm_size, comm_rank;
    +    MPI_Comm_rank(comm, &comm_rank);
    +    MPI_Comm_size(comm, &comm_size);
    +
    +    std::vector< std::vector<BoundaryValue> > bvs_distributed(comm_size);
    +
    +    for (unsigned int i=0; i<bvs.size(); ++i)
    +      bvs_distributed[settings.BoundaryRanks()[i]].push_back(bvs[i]);
    +
    +    int bvs_count[comm_size];
    +    for (int i=0; i<comm_size; ++i) {
    +      bvs_count[i] = bvs_distributed[i].size();
    +      for (unsigned int j=0; j<bvs_distributed[i].size(); ++j)
    +        val_buffer.push_back(bvs_distributed[i][j].Val());
    +    }
    +
    +    MPI_Reduce_scatter(MPI_IN_PLACE, &val_buffer.front(), bvs_count, MPI_DOUBLE, MPI_SUM, comm);
    +
    +    int begin = 0;
    +    for (int i=0; i<comm_rank; ++i)
    +      begin += bvs_distributed[i].size();
    +
    +    for (unsigned int i=0; i<bvs_distributed[comm_rank].size(); ++i)
    +      bvs_distributed[comm_rank][i].Val() = val_buffer[begin+i];
    +
    +    return bvs_distributed[comm_rank];
    +  }
    +
    +  return std::vector<BoundaryValue>();
     }
     
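    The heart of the new CommMPI::CommBoundaryValues above is an in-place MPI_Reduce_scatter: every rank packs its partial contributions to all boundary values into one buffer ordered by owning rank, and after the collective each rank reads the globally summed values back out of its own block, just as the method does via val_buffer[begin+i]. A minimal, self-contained sketch of that communication pattern follows; the two-values-per-rank layout and the unit contributions are illustrative assumptions, not part of the changeset.

    #include <mpi.h>

    #include <cstdio>
    #include <vector>

    int main(int argc, char** argv)
    {
      MPI_Init(&argc, &argv);

      int comm_rank, comm_size;
      MPI_Comm_rank(MPI_COMM_WORLD, &comm_rank);
      MPI_Comm_size(MPI_COMM_WORLD, &comm_size);

      // Toy layout (assumption): every rank owns exactly two boundary values.
      std::vector<int> bvs_count(comm_size, 2);

      // Partial contributions to all values, ordered by owning rank,
      // analogous to val_buffer in CommBoundaryValues.
      std::vector<double> val_buffer(2 * comm_size, 1.0);

      // In-place reduce-scatter: the buffer is both input and output;
      // each rank ends up with the sums for the values it owns.
      MPI_Reduce_scatter(MPI_IN_PLACE, &val_buffer.front(), &bvs_count.front(),
                         MPI_DOUBLE, MPI_SUM, MPI_COMM_WORLD);

      // Read this rank's block back, as CommBoundaryValues does with val_buffer[begin+i].
      // With unit contributions each summed value equals the number of ranks.
      const int begin = 2 * comm_rank;
      std::printf("rank %d: summed boundary values %g %g\n",
                  comm_rank, val_buffer[begin], val_buffer[begin + 1]);

      MPI_Finalize();
      return 0;
    }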
  • src/comm/comm_mpi.hpp

    rd6a338 → r177495

    @@ -81,4 +81,5 @@
       void CommSubgrid(Grid& grid_old, Grid& grid_new, const int& direction);
       void CommAddSubgrid(Grid& grid_old, Grid& grid_new, const int& direction);
    +  std::vector<BoundaryValue> CommBoundaryValues();
     
       void CommToGhostsAsyncStart(Grid& grid);
  • src/comm/comm_serial.cpp

    rd6a338 → r177495

    @@ -45,4 +45,5 @@
     #include "base/vector.hpp"
     #include "comm/comm_serial.hpp"
    +#include "discretization/boundary_value_setter.hpp"
     #include "grid/multigrid.hpp"
     #include "grid/tempgrid.hpp"

    @@ -79,4 +80,9 @@
       for (iter = grid_old.Iterators().CompleteGrid().Begin(); iter != grid_old.Iterators().CompleteGrid().End(); ++iter)
         grid_new(*iter) += grid_old.GetVal(*iter);
    +}
    +
    +std::vector<BoundaryValue> CommSerial::CommBoundaryValues()
    +{
    +  return MG::GetBoundaryValueSetter()->BoundaryValues();
     }
     
  • src/comm/comm_serial.hpp

    rd6a338 → r177495

    @@ -58,4 +58,5 @@
       void CommSubgrid(Grid& grid_old, Grid& grid_new, const int& direction);
       void CommAddSubgrid(Grid& grid_old, Grid& grid_new, const int& direction);
    +  std::vector<BoundaryValue> CommBoundaryValues();
     
       void CommToGhostsAsyncStart(Grid& grid);
  • src/comm/mpi/settings.cpp

    rd6a338 → r177495

    @@ -44,6 +44,8 @@
     #include <string>
     
    +#include "base/index.hpp"
     #include "comm/comm.hpp"
     #include "comm/mpi/settings.hpp"
    +#include "discretization/boundary_value_setter.hpp"
     #include "grid/multigrid.hpp"
     #include "grid/tempgrid.hpp"

    @@ -218,4 +220,6 @@
         AddDatatypeGlobal(FinerGrid(sol(i-1)), sol(i), 1);
       }
    +
    +  InitializeBoundaryValues();
     }
     

    @@ -428,4 +432,36 @@
     }
     
    +void VMG::MPI::Settings::InitializeBoundaryValues()
    +{
    +  assert(bv_ranks.size() == 0);
    +
    +  if (MG::GetFactory().TestObject("BOUNDARY_VALUE_SETTER")) {
    +
    +    Index coord;
    +
    +    const int level_index = MG::GetRhs()->MaxLevel() - MG::GetRhs()->GlobalMaxLevel();
    +    const std::vector<BoundaryValue>& bvs = MG::GetBoundaryValueSetter()->BoundaryValues();
    +    const std::map<Index, std::vector<GlobalIndices> >& global = MG::GetComm()->DecomposedGlobal();
    +
    +    assert(global.find(0)->second[level_index].BoundaryType() == GlobalMax);
    +
    +    MPI_Comm comm = CommunicatorLocal((*MG::GetRhs())(MG::GetRhs()->GlobalMaxLevel()));
    +
    +    bv_ranks.reserve(bvs.size());
    +
    +    for (std::vector<BoundaryValue>::const_iterator iter_b = bvs.begin(); iter_b != bvs.end(); ++iter_b) {
    +      for (std::map<Index, std::vector<GlobalIndices> >::const_iterator iter_g = global.begin(); iter_g != global.end(); ++iter_g) {
    +        if (iter_b->GetIndex().IsComponentwiseGreaterOrEqual(iter_g->second[level_index].LocalBegin()) &&
    +            iter_b->GetIndex().IsComponentwiseLess(iter_g->second[level_index].LocalEnd())) {
    +          bv_ranks.push_back(0);
    +          coord = iter_g->first;
    +          MPI_Cart_rank(comm, coord.vec(), &bv_ranks.back());
    +          break;
    +        }
    +      }
    +    }
    +  }
    +}
    +
     std::string VMG::MPI::Settings::ToString() const
     {
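    InitializeBoundaryValues above decides, for each boundary value, which process owns its grid index by comparing the index against every process's local index range, and then converts that process's cartesian coordinates into an MPI rank with MPI_Cart_rank. A small standalone sketch of the coordinate-to-rank lookup on a cartesian communicator follows; the 3-d process grid chosen by MPI_Dims_create and the probed coordinates are arbitrary illustrations, not values from the changeset.

    #include <mpi.h>

    #include <cstdio>

    int main(int argc, char** argv)
    {
      MPI_Init(&argc, &argv);

      int comm_size;
      MPI_Comm_size(MPI_COMM_WORLD, &comm_size);

      // Let MPI pick a 3-d process grid, standing in for the domain decomposition.
      int dims[3] = {0, 0, 0};
      int periods[3] = {0, 0, 0};
      MPI_Dims_create(comm_size, 3, dims);

      MPI_Comm comm_cart;
      MPI_Cart_create(MPI_COMM_WORLD, 3, dims, periods, 0, &comm_cart);

      // Translate process coordinates into the owning rank, as
      // InitializeBoundaryValues does with coord.vec() for each boundary value.
      int coords[3] = {0, 0, 0};
      int owner;
      MPI_Cart_rank(comm_cart, coords, &owner);
      std::printf("process grid %dx%dx%d: coords (0,0,0) map to rank %d\n",
                  dims[0], dims[1], dims[2], owner);

      MPI_Comm_free(&comm_cart);
      MPI_Finalize();
      return 0;
    }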
  • src/comm/mpi/settings.hpp

    rd6a338 → r177495

    @@ -70,4 +70,6 @@
       VMG::MPI::DatatypesLocal& DatatypesLocal(const Grid& grid);
     
    +  const std::vector<int>& BoundaryRanks() const {return bv_ranks;}
    +
       std::string ToString() const;
     

    @@ -79,4 +81,6 @@
       void CreateLocalCommunicator(MPI_Comm& comm_global, const Grid& grid);
     
    +  void InitializeBoundaryValues();
    +
       std::map<int, MPI_Comm> communicators_global;
       std::map<KeyUnsorted, MPI_Comm> communicators_local;

    @@ -86,4 +90,7 @@
       std::map<KeyUnsorted, VMG::MPI::DatatypesGlobal> datatypes_global;
       std::map<KeyUnsorted, VMG::MPI::DatatypesLocal> datatypes_local;
    +
    +  std::vector<int> bv_ranks;
    +
     };
     