ParallelWellInfo.hpp
1 /*
2  Copyright 2020 OPM-OP AS
3 
4  This file is part of the Open Porous Media project (OPM).
5 
6  OPM is free software: you can redistribute it and/or modify
7  it under the terms of the GNU General Public License as published by
8  the Free Software Foundation, either version 3 of the License, or
9  (at your option) any later version.
10 
11  OPM is distributed in the hope that it will be useful,
12  but WITHOUT ANY WARRANTY; without even the implied warranty of
13  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14  GNU General Public License for more details.
15 
16  You should have received a copy of the GNU General Public License
17  along with OPM. If not, see <http://www.gnu.org/licenses/>.
18 */
19 #ifndef OPM_PARALLELWELLINFO_HEADER_INCLUDED
20 
21 #define OPM_PARALLELWELLINFO_HEADER_INCLUDED
22 #include <dune/common/version.hh>
23 #include <dune/common/parallel/mpihelper.hh>
24 #include <dune/common/parallel/plocalindex.hh>
25 #include <dune/istl/owneroverlapcopy.hh>
26 
27 #include <opm/simulators/utils/ParallelCommunication.hpp>
28 
29 #include <opm/common/ErrorMacros.hpp>
30 
31 #include <memory>
32 #include <iterator>
33 #include <numeric>
34 
35 namespace Opm
36 {
37 
38 class Well;
39 
42 class CommunicateAboveBelow
43 {
44 public:
45  enum Attribute {
46  owner=1,
47  overlap=2,
48  // Older versions of DUNE contain a bug that skips entries in
49  // RemoteIndices that are local and have matching attributes;
50  // therefore we add additional attribute variants for the above index set.
51  ownerAbove = 3,
52  overlapAbove = 4
53  };
54  using MPIComm = typename Dune::MPIHelper::MPICommunicator;
55 #if DUNE_VERSION_NEWER(DUNE_COMMON, 2, 7)
56  using Communication = Dune::Communication<MPIComm>;
57 #else
58  using Communication = Dune::CollectiveCommunication<MPIComm>;
59 #endif
60  using LocalIndex = Dune::ParallelLocalIndex<Attribute>;
61  using IndexSet = Dune::ParallelIndexSet<int,LocalIndex,50>;
62 #if HAVE_MPI
63  using RI = Dune::RemoteIndices<IndexSet>;
64 #endif
65 
66  explicit CommunicateAboveBelow(const Communication& comm);
74  void pushBackEclIndex(int above, int current, bool owner=true);
75 
77  void clear();
78 
81  void beginReset();
82 
88  int endReset();
89 
95  std::vector<double> communicateAbove(double first_value,
96  const double* current,
97  std::size_t size);
98 
104  std::vector<double> communicateBelow(double first_value,
105  const double* current,
106  std::size_t size);
107 
115  template<class RAIterator>
116  void partialSumPerfValues(RAIterator begin, RAIterator end) const
117  {
118  if (this->comm_.size() < 2)
119  {
120  std::partial_sum(begin, end, begin);
121  }
122  else
123  {
124 #if HAVE_MPI
125  // The global index used in the index set current_indices
126  // is the index of the perforation in ECL Schedule definition.
127  // This is assumed to give the topological order that is used
128  // when doing the partial sum.
129  // allgather the index of the perforation in ECL schedule and the value.
130  using Value = typename std::iterator_traits<RAIterator>::value_type;
131  std::vector<int> sizes(comm_.size());
132  std::vector<int> displ(comm_.size() + 1, 0);
133  using GlobalIndex = typename IndexSet::IndexPair::GlobalIndex;
134  using Pair = std::pair<GlobalIndex,Value>;
135  std::vector<Pair> my_pairs;
136  my_pairs.reserve(current_indices_.size());
137  for (const auto& pair: current_indices_)
138  {
139  if (pair.local().attribute() == owner)
140  {
141  my_pairs.emplace_back(pair.global(), begin[pair.local()]);
142  }
143  }
144  int mySize = my_pairs.size();
145  comm_.allgather(&mySize, 1, sizes.data());
146  std::partial_sum(sizes.begin(), sizes.end(), displ.begin()+1);
147  std::vector<Pair> global_pairs(displ.back());
148  comm_.allgatherv(my_pairs.data(), my_pairs.size(), global_pairs.data(), sizes.data(), displ.data());
149  // sort the complete range to get the correct ordering
150  std::sort(global_pairs.begin(), global_pairs.end(),
151  [](const Pair& p1, const Pair& p2){ return p1.first < p2.first; } );
152  std::vector<Value> sums(global_pairs.size());
153  std::transform(global_pairs.begin(), global_pairs.end(), sums.begin(),
154  [](const Pair& p) { return p.second; });
155  std::partial_sum(sums.begin(), sums.end(),sums.begin());
156  // assign the values (both ranges are sorted by the ecl index)
157  auto global_pair = global_pairs.begin();
158  for (const auto& pair: current_indices_)
159  {
160  global_pair = std::lower_bound(global_pair, global_pairs.end(),
161  pair.global(),
162  [](const Pair& val1, const GlobalIndex& val2)
163  { return val1.first < val2; });
164  assert(global_pair != global_pairs.end());
165  assert(global_pair->first == pair.global());
166  begin[pair.local()] = sums[global_pair - global_pairs.begin()];
167  }
168 #else
169  OPM_THROW(std::logic_error, "In a sequential run the size of the communicator should be 1!");
170 #endif
171  }
172  }
173 
175  const IndexSet& getIndexSet() const;
176 
177  int numLocalPerfs() const;
178 private:
179  Communication comm_;
181  IndexSet current_indices_;
182 #if HAVE_MPI
184  IndexSet above_indices_;
185  RI remote_indices_;
186  Dune::Interface interface_;
187  Dune::BufferedCommunicator communicator_;
188 #endif
189  std::size_t num_local_perfs_{};
190 };
191 
198 class GlobalPerfContainerFactory
199 {
200 public:
201  using MPIComm = typename Dune::MPIHelper::MPICommunicator;
202 #if DUNE_VERSION_NEWER(DUNE_COMMON, 2, 7)
203  using Communication = Dune::Communication<MPIComm>;
204 #else
205  using Communication = Dune::CollectiveCommunication<MPIComm>;
206 #endif
207  using IndexSet = CommunicateAboveBelow::IndexSet;
208  using Attribute = CommunicateAboveBelow::Attribute;
209  using GlobalIndex = typename IndexSet::IndexPair::GlobalIndex;
210 
213  GlobalPerfContainerFactory(const IndexSet& local_indices, const Communication comm,
214  int num_local_perfs);
215 
221  std::vector<double> createGlobal(const std::vector<double>& local_perf_container,
222  std::size_t num_components) const;
223 
228  void copyGlobalToLocal(const std::vector<double>& global, std::vector<double>& local,
229  std::size_t num_components) const;
230 
231  int numGlobalPerfs() const;
232 private:
233  const IndexSet& local_indices_;
234  Communication comm_;
235  int num_global_perfs_;
237  std::vector<int> sizes_;
239  std::vector<int> displ_;
241  std::vector<int> map_received_;
245  std::vector<int> perf_ecl_index_;
246 };
247 
251 class ParallelWellInfo
252 {
253 public:
254  using MPIComm = typename Dune::MPIHelper::MPICommunicator;
255 #if DUNE_VERSION_NEWER(DUNE_COMMON, 2, 7)
256  using Communication = Dune::Communication<MPIComm>;
257 #else
258  using Communication = Dune::CollectiveCommunication<MPIComm>;
259 #endif
260 
261  static constexpr int INVALID_ECL_INDEX = -1;
262 
264  ParallelWellInfo(const std::string& name = {""},
265  bool hasLocalCells = true);
266 
273  ParallelWellInfo(const std::pair<std::string,bool>& well_info,
274  Communication allComm);
275 
276  const Communication& communication() const
277  {
278  return *comm_;
279  }
280 
282  void communicateFirstPerforation(bool hasFirst);
283 
284 
288  template<class T>
289  T broadcastFirstPerforationValue(const T& t) const
290  {
291  T res = t;
292  if (rankWithFirstPerf_ >= 0) {
293 #ifndef NDEBUG
294  assert(rankWithFirstPerf_ < comm_->size());
295  // On at least some OpenMPI versions this broadcast might interfere
296  // with other communication if there are bugs
297  comm_->barrier();
298 #endif
299  comm_->broadcast(&res, 1, rankWithFirstPerf_);
300 #ifndef NDEBUG
301  comm_->barrier();
302 #endif
303  }
304  return res;
305  }
306 
312  std::vector<double> communicateAboveValues(double first_value,
313  const double* current,
314  std::size_t size) const;
315 
319  std::vector<double> communicateAboveValues(double first_value,
320  const std::vector<double>& current) const;
321 
327  std::vector<double> communicateBelowValues(double last_value,
328  const double* current,
329  std::size_t size) const;
330 
334  std::vector<double> communicateBelowValues(double last_value,
335  const std::vector<double>& current) const;
336 
344  void pushBackEclIndex(int above, int current);
345 
347  const std::string& name() const
348  {
349  return name_;
350  }
351 
353  bool hasLocalCells() const
354  {
355  return hasLocalCells_;
356  }
357  bool isOwner() const
358  {
359  return isOwner_;
360  }
361 
365  void beginReset();
366 
368  void endReset();
369 
371  template<typename It>
372  typename std::iterator_traits<It>::value_type sumPerfValues(It begin, It end) const
373  {
374  using V = typename std::iterator_traits<It>::value_type;
376  auto local = std::accumulate(begin, end, V());
377  return communication().sum(local);
378  }
379 
387  template<class RAIterator>
388  void partialSumPerfValues(RAIterator begin, RAIterator end) const
389  {
390  commAboveBelow_->partialSumPerfValues(begin, end);
391  }
392 
394  void clear();
395 
401  const GlobalPerfContainerFactory& getGlobalPerfContainerFactory() const;
402 private:
403 
405  struct DestroyComm
406  {
407  void operator()(Communication* comm);
408  };
409 
410 
412  std::string name_;
414  bool hasLocalCells_;
416  bool isOwner_;
418  int rankWithFirstPerf_;
422  std::unique_ptr<Communication, DestroyComm> comm_;
423 
425  std::unique_ptr<CommunicateAboveBelow> commAboveBelow_;
426 
427  std::unique_ptr<GlobalPerfContainerFactory> globalPerfCont_;
428 };
429 
433 class CheckDistributedWellConnections
434 {
435 public:
436  CheckDistributedWellConnections(const Well& well,
437  const ParallelWellInfo& info);
438 
443  void connectionFound(std::size_t index);
444 
445  bool checkAllConnectionsFound();
446 private:
447  std::vector<std::size_t> foundConnections_;
448  const Well& well_;
449  const ParallelWellInfo& pwinfo_;
450 };
451 
452 bool operator<(const ParallelWellInfo& well1, const ParallelWellInfo& well2);
453 
454 bool operator==(const ParallelWellInfo& well1, const ParallelWellInfo& well2);
455 
456 bool operator!=(const ParallelWellInfo& well1, const ParallelWellInfo& well2);
457 
458 bool operator<(const std::pair<std::string, bool>& pair, const ParallelWellInfo& well);
459 
460 bool operator<( const ParallelWellInfo& well, const std::pair<std::string, bool>& pair);
461 
462 bool operator==(const std::pair<std::string, bool>& pair, const ParallelWellInfo& well);
463 
464 bool operator==(const ParallelWellInfo& well, const std::pair<std::string, bool>& pair);
465 
466 bool operator!=(const std::pair<std::string, bool>& pair, const ParallelWellInfo& well);
467 
468 bool operator!=(const ParallelWellInfo& well, const std::pair<std::string, bool>& pair);
469 
470 } // end namespace Opm
471 #endif // OPM_PARALLELWELLINFO_HEADER_INCLUDED
Opm::CheckDistributedWellConnections
Class checking that all connections are on active cells.
Definition: ParallelWellInfo.hpp:434
void connectionFound(std::size_t index)
Indicate that the i-th completion was found.
Definition: ParallelWellInfo.cpp:516
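A minimal usage sketch for this checker. It assumes a well object from the schedule, a matching ParallelWellInfo pwInfo, and stand-ins isLocalConnection(i) and numWellConnections that are not part of this header:

    Opm::CheckDistributedWellConnections checker(well, pwInfo);
    // Mark every connection whose cell is stored on this process.
    for (std::size_t i = 0; i < numWellConnections; ++i) {
        if (isLocalConnection(i)) {   // stand-in for the caller's ownership test
            checker.connectionFound(i);
        }
    }
    // Collective check that no connection of the well got lost during distribution.
    if (!checker.checkAllConnectionsFound()) {
        throw std::logic_error("Well " + pwInfo.name() + " lost connections during distribution.");
    }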
Opm::CommunicateAboveBelow
Class to facilitate getting values associated with the above/below perforation.
Definition: ParallelWellInfo.hpp:43
void beginReset()
Indicates that we will add the index information.
Definition: ParallelWellInfo.cpp:202
void clear()
Clear all the parallel information.
Definition: ParallelWellInfo.cpp:191
const IndexSet & getIndexSet() const
Get index set for the local perforations.
Definition: ParallelWellInfo.cpp:342
int endReset()
Indicates that the index information is complete.
Definition: ParallelWellInfo.cpp:214
std::vector< double > communicateAbove(double first_value, const double *current, std::size_t size)
Creates an array of values for the perforation above.
Definition: ParallelWellInfo.cpp:247
void partialSumPerfValues(RAIterator begin, RAIterator end) const
Do an in-place partial sum on the values attached to all perforations.
Definition: ParallelWellInfo.hpp:116
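A sketch of typical use, here through ParallelWellInfo::partialSumPerfValues, which forwards to this implementation. pwInfo is assumed to be fully set up and numLocalPerfs is a stand-in for the number of local perforations:

    // One value per local perforation, in local perforation order.
    std::vector<double> perfRates(numLocalPerfs, 0.25);   // dummy rates
    // In place: afterwards each entry holds the sum of its own value and of all
    // values attached to perforations above it, across all processes.
    pwInfo.partialSumPerfValues(perfRates.begin(), perfRates.end());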
void pushBackEclIndex(int above, int current, bool owner=true)
Adds information about the original index of the perforations in the ECL Schedule.
Definition: ParallelWellInfo.cpp:302
std::vector< double > communicateBelow(double first_value, const double *current, std::size_t size)
Creates an array of values for the perforation below.
Definition: ParallelWellInfo.cpp:274
Opm::GlobalPerfContainerFactory
A factory for creating a global data representation for distributed wells.
Definition: ParallelWellInfo.hpp:199
void copyGlobalToLocal(const std::vector< double > &global, std::vector< double > &local, std::size_t num_components) const
Copies the values of the global perforation container to the local representation.
Definition: ParallelWellInfo.cpp:149
GlobalPerfContainerFactory(const IndexSet &local_indices, const Communication comm, int num_local_perfs)
Constructor.
Definition: ParallelWellInfo.cpp:49
std::vector< double > createGlobal(const std::vector< double > &local_perf_container, std::size_t num_components) const
Creates a container that holds values for all perforations.
Definition: ParallelWellInfo.cpp:100
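A sketch of the gather/scatter round trip, assuming pwInfo is a set-up ParallelWellInfo, the factory is obtained via getGlobalPerfContainerFactory(), and numLocalPerfs is a stand-in:

    const auto& factory = pwInfo.getGlobalPerfContainerFactory();
    // One component (e.g. pressure) per local perforation.
    std::vector<double> localPressures(numLocalPerfs, 200.0e5);   // dummy values
    // Gather the values of all perforations of the well onto every rank.
    std::vector<double> globalPressures = factory.createGlobal(localPressures, 1);
    // ... work on the global representation ...
    // Scatter the (possibly modified) values back into the local container.
    factory.copyGlobalToLocal(globalPressures, localPressures, 1);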
Opm::ParallelWellInfo
Class encapsulating some information about parallel wells.
Definition: ParallelWellInfo.hpp:252
bool hasLocalCells() const
Whether local cells are perforated at some point in time.
Definition: ParallelWellInfo.hpp:353
const std::string & name() const
Name of the well.
Definition: ParallelWellInfo.hpp:347
void pushBackEclIndex(int above, int current)
Adds information about the ecl indices of the perforations.
Definition: ParallelWellInfo.cpp:390
ParallelWellInfo(const std::pair< std::string, bool > &well_info, Communication allComm)
Constructs object with communication between all ranks sharing a well.
void clear()
Free data of communication data structures.
Definition: ParallelWellInfo.cpp:410
ParallelWellInfo(const std::string &name={""}, bool hasLocalCells=true)
Constructs object using MPI_COMM_SELF.
Definition: ParallelWellInfo.cpp:352
void communicateFirstPerforation(bool hasFirst)
Collectively decide which rank has the first perforation.
Definition: ParallelWellInfo.cpp:379
const GlobalPerfContainerFactory & getGlobalPerfContainerFactory() const
Get a factory to create a global representation of perforation data.
Definition: ParallelWellInfo.cpp:447
std::vector< double > communicateBelowValues(double last_value, const double *current, std::size_t size) const
Creates an array of values for the perforation below.
Definition: ParallelWellInfo.cpp:431
std::vector< double > communicateAboveValues(double first_value, const double *current, std::size_t size) const
Creates an array of values for the perforation above.
Definition: ParallelWellInfo.cpp:416
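A sketch of the usual setup followed by a neighbour exchange; the well name, ECL indices, and pressure values below are purely illustrative:

    Opm::ParallelWellInfo pwInfo("PROD1");   // this constructor uses MPI_COMM_SELF
    pwInfo.beginReset();
    // For each local perforation: ECL index of the perforation above and its own ECL index.
    pwInfo.pushBackEclIndex(Opm::ParallelWellInfo::INVALID_ECL_INDEX, 0);   // topmost perforation
    pwInfo.pushBackEclIndex(0, 1);
    pwInfo.pushBackEclIndex(1, 2);
    pwInfo.endReset();

    std::vector<double> pressures = {210.0e5, 220.0e5, 230.0e5};
    // For every perforation, get the pressure of the perforation above it; the
    // topmost one receives the explicitly passed first_value (e.g. a BHP).
    std::vector<double> above = pwInfo.communicateAboveValues(200.0e5, pressures);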
std::iterator_traits< It >::value_type sumPerfValues(It begin, It end) const
Sum all the values of the perforations.
Definition: ParallelWellInfo.hpp:372
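A sketch, assuming perfRates holds one rate value per local perforation as in the earlier sketch:

    // Sum over all perforations of the well, identical on every rank.
    double totalRate = pwInfo.sumPerfValues(perfRates.begin(), perfRates.end());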
T broadcastFirstPerforationValue(const T &t) const
If the well does not have any open connections the member rankWithFirstPerf is not initialized,...
Definition: ParallelWellInfo.hpp:289
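A sketch of sharing a value stored at the topmost perforation, assuming hasFirstPerfLocally tells whether this rank owns it and localRefDepth is a stand-in value only meaningful on that rank:

    // Collectively determine which rank holds the first (topmost) perforation.
    pwInfo.communicateFirstPerforation(hasFirstPerfLocally);
    // The value from that rank is returned on every rank; on the other ranks
    // the argument merely provides a correctly typed buffer.
    double refDepth = pwInfo.broadcastFirstPerforationValue(hasFirstPerfLocally ? localRefDepth : 0.0);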
void beginReset()
Indicate that we will reset the ecl index information.
Definition: ParallelWellInfo.cpp:395
void partialSumPerfValues(RAIterator begin, RAIterator end) const
Do an in-place partial sum on the values attached to all perforations.
Definition: ParallelWellInfo.hpp:388
void endReset()
Indicate completion of the reset of the ecl index information.
Definition: ParallelWellInfo.cpp:401
This file contains a set of helper functions used by VFPProd / VFPInj.
Definition: BlackoilPhases.hpp:27