Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
1 change: 1 addition & 0 deletions changelog-entries/663.md
Original file line number Diff line number Diff line change
@@ -0,0 +1 @@
- Enable parallel execution of the participant macro-dumux in the two-scale heat conduction tutorial [#663](https://github.com/precice/tutorials/pull/663)
5 changes: 4 additions & 1 deletion two-scale-heat-conduction/macro-dumux/appl/main.cc
Original file line number Diff line number Diff line change
Expand Up @@ -22,6 +22,7 @@

#include <dune/common/parallel/mpihelper.hh>
#include <dune/common/timer.hh>
#include <dune/grid/common/partitionset.hh>
#include <dune/grid/io/file/vtk.hh>
#include <dune/istl/io.hh>

Expand Down Expand Up @@ -118,7 +119,7 @@ int main(int argc, char **argv)
// coordinate loop (created vectors are 1D)
// these positions of cell centers are later communicated to precice
std::cout << "Coordinates: " << std::endl;
for (const auto &element : elements(leafGridView)) {
for (const auto &element : elements(leafGridView, Dune::Partitions::interior)) {
auto fvGeometry = localView(*gridGeometry);
fvGeometry.bindElement(element);
for (const auto &scv : scvs(fvGeometry)) {
Expand Down Expand Up @@ -275,6 +276,8 @@ int main(int argc, char **argv)
dt);
couplingParticipant.readQuantityFromOtherSolver(meshName,
readDataPorosity, dt);
// store coupling data in problem
problem->spatialParams().updateCouplingData();
}
std::cout << "Solver starts" << std::endl;

Expand Down
57 changes: 44 additions & 13 deletions two-scale-heat-conduction/macro-dumux/appl/spatialparams.hh
Original file line number Diff line number Diff line change
Expand Up @@ -18,6 +18,9 @@
#ifndef DUMUX_TEST_1PNI_SPATIAL_PARAMS_HH
#define DUMUX_TEST_1PNI_SPATIAL_PARAMS_HH

#include <dune/grid/common/partitionset.hh>

#include <dumux/parallel/vectorcommdatahandle.hh>
#include <dumux/porousmediumflow/fvspatialparams1p.hh>
#include <dumux/porousmediumflow/properties.hh>

Expand Down Expand Up @@ -48,8 +51,9 @@ public:
using PermeabilityType = Scalar;

/*!
 * \brief Construct the spatial params.
 *
 * Pre-sizes the per-element coupling-data cache (one entry per dof) and
 * wires up the communication handle used to exchange that cache across
 * process boundaries in parallel runs.
 *
 * \param gridGeometry The finite-volume grid geometry.
 */
OnePNISpatialParams(std::shared_ptr<const GridGeometry> gridGeometry)
    : ParentType(gridGeometry),
      couplingParticipant_(Dumux::Precice::CouplingAdapter::getInstance()),
      // one FieldVector<double, 5> per dof: [porosity, k_00, k_01, k_10, k_11]
      couplingData_(gridGeometry->numDofs()),
      // data handle that communicates couplingData_ between ranks
      couplingDataHandle_(this->gridGeometry().elementMapper(), couplingData_)
{
}

/*!
* \brief Defines the intrinsic permeability \f$\mathrm{[m^2]}\f$.
Expand All @@ -71,8 +75,7 @@ public:
const ElementSolution &elemSol) const
{
if (getParam<bool>("Precice.RunWithCoupling") == true)
return couplingParticipant_.getScalarQuantityOnFace(
"macro-mesh", "porosity", scv.elementIndex());
return couplingData_[scv.elementIndex()][0];
else
return getParam<Scalar>("Problem.DefaultPorosity");
}
Expand All @@ -87,14 +90,10 @@ public:
DimWorldMatrix K;

if (getParam<bool>("Precice.RunWithCoupling") == true) {
K[0][0] = couplingParticipant_.getScalarQuantityOnFace(
"macro-mesh", "k_00", scv.elementIndex());
K[0][1] = couplingParticipant_.getScalarQuantityOnFace(
"macro-mesh", "k_01", scv.elementIndex());
K[1][0] = couplingParticipant_.getScalarQuantityOnFace(
"macro-mesh", "k_10", scv.elementIndex());
K[1][1] = couplingParticipant_.getScalarQuantityOnFace(
"macro-mesh", "k_11", scv.elementIndex());
K[0][0] = couplingData_[scv.elementIndex()][1];
K[0][1] = couplingData_[scv.elementIndex()][2];
K[1][0] = couplingData_[scv.elementIndex()][3];
K[1][1] = couplingData_[scv.elementIndex()][4];
} else {
K[0][0] = getParam<Scalar>("Component.SolidThermalConductivity");
K[0][1] = 0.0;
Expand All @@ -104,8 +103,40 @@ public:
return K;
}

void updateCouplingData()
{
for (const auto &element : elements(this->gridGeometry().gridView(), Dune::Partitions::interior)) {
auto fvGeometry = localView(this->gridGeometry());
fvGeometry.bindElement(element);
for (const auto &scv : scvs(fvGeometry)) {
const auto elementIdx = scv.elementIndex();
couplingData_[elementIdx][0] =
couplingParticipant_.getScalarQuantityOnFace("macro-mesh", "porosity", elementIdx);
couplingData_[elementIdx][1] =
couplingParticipant_.getScalarQuantityOnFace("macro-mesh", "k_00", elementIdx);
couplingData_[elementIdx][2] =
couplingParticipant_.getScalarQuantityOnFace("macro-mesh", "k_01", elementIdx);
couplingData_[elementIdx][3] =
couplingParticipant_.getScalarQuantityOnFace("macro-mesh", "k_10", elementIdx);
couplingData_[elementIdx][4] =
couplingParticipant_.getScalarQuantityOnFace("macro-mesh", "k_11", elementIdx);
}
}
// Trigger exchange of coupling data between neighboring ranks, if the domain is partitioned
if (this->gridGeometry().gridView().comm().size() > 1) {
this->gridGeometry().gridView().communicate(couplingDataHandle_,
Dune::InteriorBorder_All_Interface, Dune::ForwardCommunication);
}
}

private:
Dumux::Precice::CouplingAdapter &couplingParticipant_;
Dune::BlockVector<Dune::FieldVector<double, 5>> couplingData_;
Dumux::VectorCommDataHandleEqual<
typename GridGeometry::ElementMapper,
Dune::BlockVector<Dune::FieldVector<double, 5>>,
/* Entity codimension = */ 0>
couplingDataHandle_;
};

} // end namespace Dumux
Expand Down