#ifndef OPM_PARALLELWELLINFO_HEADER_INCLUDED
#define OPM_PARALLELWELLINFO_HEADER_INCLUDED

#include <dune/common/version.hh>
#include <dune/common/parallel/mpihelper.hh>
#include <dune/common/parallel/plocalindex.hh>
#include <dune/istl/owneroverlapcopy.hh>

#include <opm/simulators/utils/ParallelCommunication.hpp>

#include <opm/common/ErrorMacros.hpp>

#include <algorithm>
#include <cassert>
#include <iterator>
#include <memory>
#include <numeric>
#include <string>
#include <utility>
#include <vector>
namespace Opm
{

/// \brief Class to facilitate getting values associated with the above/below perforation.
class CommunicateAboveBelow
{
public:
    enum Attribute { owner = 1, overlap = 2, ownerAbove = 3, overlapAbove = 4 };
    using LocalIndex = Dune::ParallelLocalIndex<Attribute>;
    using IndexSet = Dune::ParallelIndexSet<int,LocalIndex,50>;
#if HAVE_MPI
    using RI = Dune::RemoteIndices<IndexSet>;
#endif
    /// \brief Creates an array of values for the perforation above.
    std::vector<double> communicateAbove(double first_value,
                                         const double* current,
                                         std::size_t size);

    /// \brief Creates an array of values for the perforation below.
    std::vector<double> communicateBelow(double first_value,
                                         const double* current,
                                         std::size_t size);
    /// \brief Do an (in place) partial sum on values attached to all perforations.
    template<class RAIterator>
    void partialSumPerfValues(RAIterator begin, RAIterator end) const
    {
        if (this->comm_.size() < 2)
        {
            std::partial_sum(begin, end, begin);
        }
        else
        {
#if HAVE_MPI
            // The global index of an entry of current_indices_ is the
            // perforation's index in the ECL schedule, which gives the
            // topological order of the perforations along the well.
            using Value = typename std::iterator_traits<RAIterator>::value_type;
            std::vector<int> sizes(comm_.size());
            std::vector<int> displ(comm_.size() + 1, 0);
            using GlobalIndex = typename IndexSet::IndexPair::GlobalIndex;
            using Pair = std::pair<GlobalIndex,Value>;
            std::vector<Pair> my_pairs;
            my_pairs.reserve(current_indices_.size());
            for (const auto& pair : current_indices_)
            {
                if (pair.local().attribute() == owner)
                {
                    my_pairs.emplace_back(pair.global(), begin[pair.local()]);
                }
            }
            int mySize = my_pairs.size();
            comm_.allgather(&mySize, 1, sizes.data());
            std::partial_sum(sizes.begin(), sizes.end(), displ.begin() + 1);
            std::vector<Pair> global_pairs(displ.back());
            comm_.allgatherv(my_pairs.data(), my_pairs.size(), global_pairs.data(),
                             sizes.data(), displ.data());
            // Sort the gathered pairs by global (ECL) index to recover the
            // topological order of the perforations.
            std::sort(global_pairs.begin(), global_pairs.end(),
                      [](const Pair& p1, const Pair& p2) { return p1.first < p2.first; });
            std::vector<Value> sums(global_pairs.size());
            std::transform(global_pairs.begin(), global_pairs.end(), sums.begin(),
                           [](const Pair& p) { return p.second; });
            std::partial_sum(sums.begin(), sums.end(), sums.begin());
            // Write the prefix sums back to the local entries. Both ranges
            // are sorted by the ECL index, hence the moving lower bound.
            auto global_pair = global_pairs.begin();
            for (const auto& pair : current_indices_)
            {
                global_pair = std::lower_bound(global_pair, global_pairs.end(),
                                               pair.global(),
                                               [](const Pair& val1, const GlobalIndex& val2)
                                               { return val1.first < val2; });
                assert(global_pair != global_pairs.end());
                assert(global_pair->first == pair.global());
                begin[pair.local()] = sums[global_pair - global_pairs.begin()];
            }
#else
            OPM_THROW(std::logic_error,
                      "In a sequential run the size of the communicator should be 1!");
#endif
        }
    }
    /// \brief Get the number of local perforations.
    int numLocalPerfs() const;

private:
    Parallel::Communication comm_;
    /// \brief Mapping of the local perforation index to the ECL schedule index.
    IndexSet current_indices_;
    /// \brief Mapping of the above-perforation index to the ECL schedule index.
    IndexSet above_indices_;
    Dune::Interface interface_;
    Dune::BufferedCommunicator communicator_;
    std::size_t num_local_perfs_{};
};
/// \brief A factory for creating a global data representation for distributed wells.
class GlobalPerfContainerFactory
{
public:
    using IndexSet = CommunicateAboveBelow::IndexSet;
    using Attribute = CommunicateAboveBelow::Attribute;
    using GlobalIndex = typename IndexSet::IndexPair::GlobalIndex;

    /// \brief Constructor.
    GlobalPerfContainerFactory(const IndexSet& local_indices,
                               const Parallel::Communication comm,
                               int num_local_perfs);

    /// \brief Creates a container that holds values for all perforations.
    std::vector<double> createGlobal(const std::vector<double>& local_perf_container,
                                     std::size_t num_components) const;

    /// \brief Copies the values of the global perforations to the local representation.
    void copyGlobalToLocal(const std::vector<double>& global, std::vector<double>& local,
                           std::size_t num_components) const;

    int numGlobalPerfs() const;

private:
    const IndexSet& local_indices_;
    Parallel::Communication comm_;
    int num_global_perfs_;
    /// \brief Sizes for the allgatherv calls.
    std::vector<int> sizes_;
    /// \brief Displacements for the allgatherv calls.
    std::vector<int> displ_;
    /// \brief Mapping for storing the received values at the correct index.
    std::vector<int> map_received_;
    /// \brief The ECL schedule index of each local perforation.
    std::vector<int> perf_ecl_index_;
};
/// \brief Class encapsulating some information about parallel wells.
class ParallelWellInfo
{
public:
    static constexpr int INVALID_ECL_INDEX = -1;

    ParallelWellInfo(const std::pair<std::string, bool>& well_info,
                     Parallel::Communication allComm);

    const Parallel::Communication& communication() const
    { return *comm_; }

    /// \brief Broadcasts a value from the rank that has the first perforation.
    /// If the well does not have any open connections, rankWithFirstPerf_ is
    /// not initialized and the local value is returned unchanged.
    template<class T>
    T broadcastFirstPerforationValue(const T& t) const
    {
        T res = t;
        if (rankWithFirstPerf_ >= 0) {
            assert(rankWithFirstPerf_ < comm_->size());
            comm_->broadcast(&res, 1, rankWithFirstPerf_);
        }
        return res;
    }

    /// \brief Creates an array of values for the perforation above.
    std::vector<double> communicateAboveValues(double first_value,
                                               const double* current,
                                               std::size_t size) const;

    std::vector<double> communicateAboveValues(double first_value,
                                               const std::vector<double>& current) const;

    /// \brief Creates an array of values for the perforation below.
    std::vector<double> communicateBelowValues(double last_value,
                                               const double* current,
                                               std::size_t size) const;

    std::vector<double> communicateBelowValues(double last_value,
                                               const std::vector<double>& current) const;

    /// \brief Name of the well.
    const std::string& name() const
    { return name_; }

    /// \brief Whether any local cells are perforated by this well.
    bool hasLocalCells() const
    { return hasLocalCells_; }
    /// \brief Sum all the values of the perforations.
    template<typename It>
    typename std::iterator_traits<It>::value_type sumPerfValues(It begin, It end) const
    {
        using V = typename std::iterator_traits<It>::value_type;
        auto local = std::accumulate(begin, end, V());
        return communication().sum(local);
    }

    /// \brief Do an (in place) partial sum on values attached to all perforations.
    template<class RAIterator>
    void partialSumPerfValues(RAIterator begin, RAIterator end) const
    {
        commAboveBelow_->partialSumPerfValues(begin, end);
    }

private:
    /// \brief Deleter that frees the well's custom communicator.
    struct DestroyComm
    {
        void operator()(Parallel::Communication* comm);
    };

    /// \brief Name of the well.
    std::string name_;
    /// \brief Whether any local cells are perforated by this well.
    bool hasLocalCells_;
    /// \brief Rank that has the first perforation (negative if unset).
    int rankWithFirstPerf_;
    std::unique_ptr<Parallel::Communication, DestroyComm> comm_;
    /// \brief Communicates values between the above and current perforation.
    std::unique_ptr<CommunicateAboveBelow> commAboveBelow_;
    std::unique_ptr<GlobalPerfContainerFactory> globalPerfCont_;
};
/// \brief Class checking that all connections are on active cells.
class CheckDistributedWellConnections
{
public:
    /// \brief Indicate that the i-th completion was found.
    void connectionFound(std::size_t index);

    bool checkAllConnectionsFound();

private:
    std::vector<std::size_t> foundConnections_;
};

/// \brief Comparison operators that allow searching sorted collections of
///        ParallelWellInfo objects with a (name, hasLocalCells) pair as key.
bool operator<(const std::pair<std::string, bool>& pair, const ParallelWellInfo& well);
bool operator<(const ParallelWellInfo& well, const std::pair<std::string, bool>& pair);
bool operator==(const std::pair<std::string, bool>& pair, const ParallelWellInfo& well);
bool operator==(const ParallelWellInfo& well, const std::pair<std::string, bool>& pair);
bool operator!=(const std::pair<std::string, bool>& pair, const ParallelWellInfo& well);
bool operator!=(const ParallelWellInfo& well, const std::pair<std::string, bool>& pair);

} // namespace Opm

#endif // OPM_PARALLELWELLINFO_HEADER_INCLUDED
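These mixed-type operators let a collection of ParallelWellInfo objects that is kept sorted by the (name, hasLocalCells) key be searched with a plain pair, without constructing a ParallelWellInfo. A minimal lookup sketch; findWell and its arguments are illustrative, not part of the header:

    #include <algorithm>
    #include <string>
    #include <utility>
    #include <vector>

    // Hypothetical helper: look up a well by its (name, hasLocalCells) key
    // in a vector kept sorted by that key.
    const Opm::ParallelWellInfo*
    findWell(const std::vector<Opm::ParallelWellInfo>& sorted_wells,
             const std::string& name, bool has_local_cells)
    {
        const auto key = std::make_pair(name, has_local_cells);
        // The heterogeneous operator< declared above makes this
        // lower_bound call well-formed.
        auto candidate = std::lower_bound(sorted_wells.begin(), sorted_wells.end(), key);
        if (candidate != sorted_wells.end() && *candidate == key) {
            return &*candidate;
        }
        return nullptr;
    }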
Class checking that all connections are on active cells.
Definition: ParallelWellInfo.hpp:419
void connectionFound(std::size_t index)
Indicate that the i-th completion was found.
Definition: ParallelWellInfo.cpp:517
Class to facilitate getting values associated with the above/below perforation.
Definition: ParallelWellInfo.hpp:43
void beginReset()
Indicates that we will add the index information.
Definition: ParallelWellInfo.cpp:203
void clear()
Clear all the parallel information.
Definition: ParallelWellInfo.cpp:192
const IndexSet & getIndexSet() const
Get index set for the local perforations.
Definition: ParallelWellInfo.cpp:343
int endReset()
Indicates that the index information is complete.
Definition: ParallelWellInfo.cpp:215
std::vector< double > communicateAbove(double first_value, const double *current, std::size_t size)
Creates an array of values for the perforation above.
Definition: ParallelWellInfo.cpp:248
void partialSumPerfValues(RAIterator begin, RAIterator end) const
Do a (in place) partial sum on values attached to all perforations.
Definition: ParallelWellInfo.hpp:110
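A usage sketch for the partial sum; well_info and local_inflow_rates are illustrative assumptions. After the call, each entry holds the cumulative rate of all perforations from the top of the well down to and including that perforation, in ECL (topological) order, even when the well is distributed over several ranks:

    // Inflow rates of the perforations local to this rank (hypothetical source).
    std::vector<double> perf_rates = local_inflow_rates();
    // In-place collective operation: perf_rates[i] becomes the prefix sum over
    // the whole well up to the i-th local perforation.
    well_info.partialSumPerfValues(perf_rates.begin(), perf_rates.end());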
void pushBackEclIndex(int above, int current, bool owner=true)
Adds information about the original index of the perforations in the ECL schedule.
Definition: ParallelWellInfo.cpp:303
std::vector< double > communicateBelow(double first_value, const double *current, std::size_t size)
Creates an array of values for the perforation below.
Definition: ParallelWellInfo.cpp:275
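A sketch of the above-exchange; comm_above_below is assumed to be a fully set-up CommunicateAboveBelow, and the pressure values are illustrative:

    // Hypothetical local data: pressures at this rank's perforations.
    std::vector<double> perf_pressure{215.0e5, 220.0e5, 226.0e5};
    // Value used for the well's first perforation, which has nothing above it.
    const double bhp_ref = 210.0e5;
    // above[i] receives the pressure at the perforation above the i-th local one.
    auto above = comm_above_below.communicateAbove(bhp_ref,
                                                   perf_pressure.data(),
                                                   perf_pressure.size());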
A factory for creating a global data representation for distributed wells.
Definition: ParallelWellInfo.hpp:194
void copyGlobalToLocal(const std::vector< double > &global, std::vector< double > &local, std::size_t num_components) const
Copies the values of the global perforation to the local representation.
Definition: ParallelWellInfo.cpp:150
GlobalPerfContainerFactory(const IndexSet &local_indices, const Parallel::Communication comm, int num_local_perfs)
Constructor.
Definition: ParallelWellInfo.cpp:49
std::vector< double > createGlobal(const std::vector< double > &local_perf_container, std::size_t num_components) const
Creates a container that holds values for all perforations.
Definition: ParallelWellInfo.cpp:101
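A round-trip sketch; well_info and num_local_perfs are illustrative, and the interleaved storage of num_components consecutive entries per perforation is an assumption made for the example:

    const auto& factory = well_info.getGlobalPerfContainerFactory();
    constexpr std::size_t num_components = 3; // e.g. one value per phase
    // Local container: num_components entries per local perforation.
    std::vector<double> local_vals(num_local_perfs * num_components, 0.0);
    // Gather to a container covering every perforation of the well.
    auto global_vals = factory.createGlobal(local_vals, num_components);
    // ... compute sequentially on the complete well representation ...
    // Scatter the (possibly updated) values back to the local perforations.
    factory.copyGlobalToLocal(global_vals, local_vals, num_components);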
Class encapsulating some information about parallel wells.
Definition: ParallelWellInfo.hpp:243
bool hasLocalCells() const
Whether any local cells are perforated by this well.
Definition: ParallelWellInfo.hpp:337
void pushBackEclIndex(int above, int current)
Adds information about the ECL indices of the perforations.
Definition: ParallelWellInfo.cpp:391
void clear()
Free data of communication data structures.
Definition: ParallelWellInfo.cpp:411
const std::string & name() const
Name of the well.
Definition: ParallelWellInfo.hpp:331
ParallelWellInfo(const std::string &name={""}, bool hasLocalCells=true)
Constructs object using MPI_COMM_SELF.
Definition: ParallelWellInfo.cpp:353
void communicateFirstPerforation(bool hasFirst)
Collectively decide which rank has the first perforation.
Definition: ParallelWellInfo.cpp:380
const GlobalPerfContainerFactory & getGlobalPerfContainerFactory() const
Get a factory to create a global representation of perforation data.
Definition: ParallelWellInfo.cpp:448
std::vector< double > communicateBelowValues(double last_value, const double *current, std::size_t size) const
Creates an array of values for the perforation below.
Definition: ParallelWellInfo.cpp:432
std::vector< double > communicateAboveValues(double first_value, const double *current, std::size_t size) const
Creates an array of values for the perforation above.
Definition: ParallelWellInfo.cpp:417
std::iterator_traits< It >::value_type sumPerfValues(It begin, It end) const
Sum all the values of the perforations.
Definition: ParallelWellInfo.hpp:356
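Usage sketch (well_info and the rate values are illustrative): the accumulation is local, followed by a global reduction, so every rank ends up with the same total.

    // Rates of the perforations owned by this rank (hypothetical values).
    std::vector<double> local_rates{1.5, 0.7, 2.1};
    const double total_rate = well_info.sumPerfValues(local_rates.begin(),
                                                      local_rates.end());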
T broadcastFirstPerforationValue(const T &t) const
If the well does not have any open connections, the member rankWithFirstPerf is not initialized,...
Definition: ParallelWellInfo.hpp:273
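A sketch combining the two first-perforation calls; owns_first_perf and local_value_or_dummy are illustrative. communicateFirstPerforation first establishes collectively which rank has the first perforation, after which that rank's value can be broadcast:

    // Exactly one rank passes true; afterwards all ranks agree who that is.
    well_info.communicateFirstPerforation(owns_first_perf);
    // Ranks without the first perforation pass a dummy; all ranks receive the
    // owning rank's value (or the dummy if the well has no open connections).
    const double first_perf_pressure =
        well_info.broadcastFirstPerforationValue(local_value_or_dummy);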
void beginReset()
Indicate that we will reset the ECL index information.
Definition: ParallelWellInfo.cpp:396
void partialSumPerfValues(RAIterator begin, RAIterator end) const
Do a (in place) partial sum on values attached to all perforations.
Definition: ParallelWellInfo.hpp:372
void endReset()
Indicate completion of the reset of the ECL index information.
Definition: ParallelWellInfo.cpp:402
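The reset calls bracket the (re)construction of the parallel index information. A sketch of the typical sequence; the perforation loop and its fields (above_ecl_index, ecl_index) are illustrative assumptions:

    well_info.beginReset();
    for (const auto& perf : local_perforations) {
        // ECL index of the perforation above (ParallelWellInfo::INVALID_ECL_INDEX
        // for the well's first perforation) and of the perforation itself.
        well_info.pushBackEclIndex(perf.above_ecl_index, perf.ecl_index);
    }
    well_info.endReset();
    // Above/below communication and partial sums are available from here on.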