From cf836bbfb226b7082a1907ca9b7a46dfa2132915 Mon Sep 17 00:00:00 2001 From: Kyle Nelli Date: Thu, 16 Feb 2023 15:39:05 -0800 Subject: [PATCH 1/5] Use proper wording in VectorImpl error messages --- docs/DevGuide/ImplementingVectors.md | 13 +++++++------ src/DataStructures/VectorImpl.hpp | 4 ++-- .../Test_ComplexDataVectorAsserts.cpp | 2 +- .../Test_ComplexDiagonalModalOperator.cpp | 2 +- .../Unit/DataStructures/Test_ComplexModalVector.cpp | 2 +- .../Unit/DataStructures/Test_DataVectorAsserts.cpp | 2 +- .../DataStructures/Test_DiagonalModalOperator.cpp | 2 +- tests/Unit/DataStructures/Test_ModalVector.cpp | 2 +- .../DataStructures/Test_NonZeroStaticSizeVector.cpp | 2 +- tests/Unit/DataStructures/Test_Variables.cpp | 4 ++-- .../Helpers/DataStructures/VectorImplTestHelper.hpp | 2 +- 11 files changed, 19 insertions(+), 18 deletions(-) diff --git a/docs/DevGuide/ImplementingVectors.md b/docs/DevGuide/ImplementingVectors.md index a1ed6bf5738f..0783d29615df 100644 --- a/docs/DevGuide/ImplementingVectors.md +++ b/docs/DevGuide/ImplementingVectors.md @@ -282,17 +282,18 @@ of `std::move`. This function intentionally generates an error when assigning values from one vector to a differently sized, non-owning vector (made non-owning by use of `set_data_ref`). The assertion test which calls this function should search for -the string "Must copy into same size". Three forms of the test are provided, -which are switched between using a value from the enum `RefSizeErrorTestKind` in -the first function argument: +the string "Must copy/move/assign into same size". Three forms of the test are +provided, which are switched between using a value from the enum +`RefSizeErrorTestKind` in the first function argument: - `RefSizeErrorTestKind::Copy`: tests that the size error is appropriately - generated when copying to a non-owning vector of the wrong size. + generated when copying to a non-owning vector of the wrong size. This has + "copy" in the message. - `RefSizeErrorTestKind::ExpressionAssign`: tests that the size error is appropriately generated when assigning the result of a mathematical expression - to a non-owning vector of the wrong size. + to a non-owning vector of the wrong size. This has "assign" in the message. - `RefSizeErrorTestKind::Move`: tests that the size error is appropriately generated when a vector is `std::move`d into a non-owning vector of the wrong - size + size. This has "move" in the message. 
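For concreteness, the three flavors correspond to assertion-test checks of the following shape. This is a sketch modeled on the `CHECK_THROWS_WITH` pattern in tests/Unit/DataStructures/Test_NonZeroStaticSizeVector.cpp later in this series; `DataVector` stands in as an example vector type and the include paths are assumed, so treat it as illustrative rather than as part of the patch.

```cpp
// Sketch: one assertion-test check per RefSizeErrorTestKind, each matching
// the wording that kind of size error now produces. Only triggers in builds
// with ASSERTs enabled.
#include "DataStructures/DataVector.hpp"
#include "Framework/TestingFramework.hpp"
#include "Helpers/DataStructures/VectorImplTestHelper.hpp"

void test_ref_size_asserts() {
  CHECK_THROWS_WITH(
      ([]() {
        TestHelpers::VectorImpl::vector_ref_test_size_error<DataVector>(
            TestHelpers::VectorImpl::RefSizeErrorTestKind::Copy);
      }()),
      Catch::Contains("Must copy into same size"));
  CHECK_THROWS_WITH(
      ([]() {
        TestHelpers::VectorImpl::vector_ref_test_size_error<DataVector>(
            TestHelpers::VectorImpl::RefSizeErrorTestKind::ExpressionAssign);
      }()),
      Catch::Contains("Must assign into same size"));
  CHECK_THROWS_WITH(
      ([]() {
        TestHelpers::VectorImpl::vector_ref_test_size_error<DataVector>(
            TestHelpers::VectorImpl::RefSizeErrorTestKind::Move);
      }()),
      Catch::Contains("Must move into same size"));
}
```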
## `TestHelpers::VectorImpl::test_functions_with_vector_arguments()` diff --git a/src/DataStructures/VectorImpl.hpp b/src/DataStructures/VectorImpl.hpp index eeb7eb931359..49e645137e15 100644 --- a/src/DataStructures/VectorImpl.hpp +++ b/src/DataStructures/VectorImpl.hpp @@ -392,7 +392,7 @@ VectorImpl::operator=( reset_pointer_vector(size()); rhs.clear(); } else { - ASSERT(rhs.size() == size(), "Must copy into same size, not " + ASSERT(rhs.size() == size(), "Must move into same size, not " << rhs.size() << " into " << size()); if (LIKELY(data() != rhs.data())) { std::memcpy(data(), rhs.data(), size() * sizeof(value_type)); @@ -432,7 +432,7 @@ VectorImpl::operator=( owned_data_ = heap_alloc_if_necessary((*expression).size()); reset_pointer_vector((*expression).size()); } else if (not owning_) { - ASSERT((*expression).size() == size(), "Must copy into same size, not " + ASSERT((*expression).size() == size(), "Must assign into same size, not " << (*expression).size() << " into " << size()); } diff --git a/tests/Unit/DataStructures/Test_ComplexDataVectorAsserts.cpp b/tests/Unit/DataStructures/Test_ComplexDataVectorAsserts.cpp index 51bf8e5b0108..586af7384672 100644 --- a/tests/Unit/DataStructures/Test_ComplexDataVectorAsserts.cpp +++ b/tests/Unit/DataStructures/Test_ComplexDataVectorAsserts.cpp @@ -14,7 +14,7 @@ #include "Helpers/DataStructures/VectorImplTestHelper.hpp" #include "Utilities/ErrorHandling/Error.hpp" -// [[OutputRegex, Must copy into same size]] +// [[OutputRegex, Must assign into same size]] [[noreturn]] SPECTRE_TEST_CASE( "Unit.DataStructures.ComplexDataVector.ExpressionAssignError", "[Unit][DataStructures]") { diff --git a/tests/Unit/DataStructures/Test_ComplexDiagonalModalOperator.cpp b/tests/Unit/DataStructures/Test_ComplexDiagonalModalOperator.cpp index e6a29523b6cc..322e0c5442be 100644 --- a/tests/Unit/DataStructures/Test_ComplexDiagonalModalOperator.cpp +++ b/tests/Unit/DataStructures/Test_ComplexDiagonalModalOperator.cpp @@ -18,7 +18,7 @@ // IWYU pragma: no_forward_declare DiagonalModalOperator; // IWYU pragma: no_include -// [[OutputRegex, Must copy into same size]] +// [[OutputRegex, Must assign into same size]] [[noreturn]] SPECTRE_TEST_CASE( "Unit.DataStructures.ComplexDiagonalModalOperator.ExpressionAssignError", "[Unit][DataStructures]") { diff --git a/tests/Unit/DataStructures/Test_ComplexModalVector.cpp b/tests/Unit/DataStructures/Test_ComplexModalVector.cpp index a5da1f193ad1..51f664d47b20 100644 --- a/tests/Unit/DataStructures/Test_ComplexModalVector.cpp +++ b/tests/Unit/DataStructures/Test_ComplexModalVector.cpp @@ -18,7 +18,7 @@ // IWYU pragma: no_include -// [[OutputRegex, Must copy into same size]] +// [[OutputRegex, Must assign into same size]] [[noreturn]] SPECTRE_TEST_CASE( "Unit.DataStructures.ComplexModalVector.ExpressionAssignError", "[Unit][DataStructures]") { diff --git a/tests/Unit/DataStructures/Test_DataVectorAsserts.cpp b/tests/Unit/DataStructures/Test_DataVectorAsserts.cpp index ecf154a3240a..c0b1912e7950 100644 --- a/tests/Unit/DataStructures/Test_DataVectorAsserts.cpp +++ b/tests/Unit/DataStructures/Test_DataVectorAsserts.cpp @@ -14,7 +14,7 @@ #include "Helpers/DataStructures/VectorImplTestHelper.hpp" #include "Utilities/ErrorHandling/Error.hpp" -// [[OutputRegex, Must copy into same size]] +// [[OutputRegex, Must assign into same size]] [[noreturn]] SPECTRE_TEST_CASE( "Unit.DataStructures.DataVector.ExpressionAssignError", "[Unit][DataStructures]") { diff --git a/tests/Unit/DataStructures/Test_DiagonalModalOperator.cpp 
b/tests/Unit/DataStructures/Test_DiagonalModalOperator.cpp index b8c3e55bde2e..9488f33b4a02 100644 --- a/tests/Unit/DataStructures/Test_DiagonalModalOperator.cpp +++ b/tests/Unit/DataStructures/Test_DiagonalModalOperator.cpp @@ -15,7 +15,7 @@ // IWYU pragma: no_include -// [[OutputRegex, Must copy into same size]] +// [[OutputRegex, Must assign into same size]] [[noreturn]] SPECTRE_TEST_CASE( "Unit.DataStructures.DiagonalModalOperator.ExpressionAssignError", "[Unit][DataStructures]") { diff --git a/tests/Unit/DataStructures/Test_ModalVector.cpp b/tests/Unit/DataStructures/Test_ModalVector.cpp index 3b298753a8bb..01a4175a2a2e 100644 --- a/tests/Unit/DataStructures/Test_ModalVector.cpp +++ b/tests/Unit/DataStructures/Test_ModalVector.cpp @@ -16,7 +16,7 @@ // IWYU pragma: no_include -// [[OutputRegex, Must copy into same size]] +// [[OutputRegex, Must assign into same size]] [[noreturn]] SPECTRE_TEST_CASE( "Unit.DataStructures.ModalVector.ExpressionAssignError", "[Unit][DataStructures]") { diff --git a/tests/Unit/DataStructures/Test_NonZeroStaticSizeVector.cpp b/tests/Unit/DataStructures/Test_NonZeroStaticSizeVector.cpp index e6ebfaa47eb8..c2261372bc39 100644 --- a/tests/Unit/DataStructures/Test_NonZeroStaticSizeVector.cpp +++ b/tests/Unit/DataStructures/Test_NonZeroStaticSizeVector.cpp @@ -69,7 +69,7 @@ void test_asserts() { CustomStaticSizeVector>( TestHelpers::VectorImpl::RefSizeErrorTestKind::ExpressionAssign); }()), - Catch::Contains("Must copy into same size")); + Catch::Contains("Must assign into same size")); CHECK_THROWS_WITH(([]() { TestHelpers::VectorImpl::vector_ref_test_size_error< diff --git a/tests/Unit/DataStructures/Test_Variables.cpp b/tests/Unit/DataStructures/Test_Variables.cpp index d5d0a2049f21..a54400ec5695 100644 --- a/tests/Unit/DataStructures/Test_Variables.cpp +++ b/tests/Unit/DataStructures/Test_Variables.cpp @@ -1327,14 +1327,14 @@ void test_asserts() { auto& tensor_in_vars = get>(vars); tensor_in_vars = tnsr::I{10_st, -4.0}; }()), - Catch::Contains("Must copy into same size, not 10 into 1")); + Catch::Contains("Must move into same size, not 10 into 1")); CHECK_THROWS_WITH( ([]() { Variables>> vars; get>(vars) = Scalar{{{{0.}}}}; }()), - Catch::Contains("Must copy into same size, not 1 into 0")); + Catch::Contains("Must move into same size, not 1 into 0")); CHECK_THROWS_WITH( ([]() { Variables, diff --git a/tests/Unit/Helpers/DataStructures/VectorImplTestHelper.hpp b/tests/Unit/Helpers/DataStructures/VectorImplTestHelper.hpp index d4ec3d2f0309..1ecb7ea78896 100644 --- a/tests/Unit/Helpers/DataStructures/VectorImplTestHelper.hpp +++ b/tests/Unit/Helpers/DataStructures/VectorImplTestHelper.hpp @@ -626,7 +626,7 @@ enum RefSizeErrorTestKind { Copy, ExpressionAssign, Move }; /// appropriately generates an error. /// /// \details a calling function should be an `ASSERTION_TEST()` and check for -/// the string "Must copy into same size". +/// the string "Must copy/move/assign into same size". 
/// Three types of tests are provided and one must be provided as the first /// function argument: /// - `RefSizeErrorTestKind::Copy`: Checks that copy-assigning to a non-owning From 8c2810ea1a4385edb7320244bc7938ed15ac857f Mon Sep 17 00:00:00 2001 From: Kyle Nelli Date: Wed, 15 Feb 2023 15:26:11 -0800 Subject: [PATCH 2/5] Add functions to MortarData to retrieve non-const references --- .../DiscontinuousGalerkin/MortarData.hpp | 12 ++ .../DiscontinuousGalerkin/Test_MortarData.cpp | 138 +++++++++++++----- 2 files changed, 112 insertions(+), 38 deletions(-) diff --git a/src/Evolution/DiscontinuousGalerkin/MortarData.hpp b/src/Evolution/DiscontinuousGalerkin/MortarData.hpp index c14aebcab0d2..bf57acba9535 100644 --- a/src/Evolution/DiscontinuousGalerkin/MortarData.hpp +++ b/src/Evolution/DiscontinuousGalerkin/MortarData.hpp @@ -145,6 +145,8 @@ class MortarData { const TimeStepId& time_step_id() const { return time_step_id_; } + TimeStepId& time_step_id() { return time_step_id_; } + auto local_mortar_data() const -> const std::optional, DataVector>>& { return local_mortar_data_; @@ -155,6 +157,16 @@ class MortarData { return neighbor_mortar_data_; } + auto local_mortar_data() + -> std::optional, DataVector>>& { + return local_mortar_data_; + } + + auto neighbor_mortar_data() + -> std::optional, DataVector>>& { + return neighbor_mortar_data_; + } + // NOLINTNEXTLINE(google-runtime-references) void pup(PUP::er& p); diff --git a/tests/Unit/Evolution/DiscontinuousGalerkin/Test_MortarData.cpp b/tests/Unit/Evolution/DiscontinuousGalerkin/Test_MortarData.cpp index e1e848166dde..c4aef2bfecb9 100644 --- a/tests/Unit/Evolution/DiscontinuousGalerkin/Test_MortarData.cpp +++ b/tests/Unit/Evolution/DiscontinuousGalerkin/Test_MortarData.cpp @@ -25,6 +25,75 @@ namespace evolution::dg { namespace { +template +void assign_with_insert(const gsl::not_null*> mortar_data, + TimeStepId time_step_id, Mesh local_mesh, + std::optional local_data, + Mesh neighbor_mesh, + std::optional neighbor_data, + const std::string& expected_output) { + if (local_data.has_value()) { + mortar_data->insert_local_mortar_data(time_step_id, local_mesh, + *local_data); + + CHECK(mortar_data->local_mortar_data().has_value()); + CHECK_FALSE(mortar_data->neighbor_mortar_data().has_value()); + } + + if (neighbor_data.has_value()) { + mortar_data->insert_neighbor_mortar_data(time_step_id, neighbor_mesh, + *neighbor_data); + CHECK(mortar_data->local_mortar_data().has_value()); + CHECK(mortar_data->neighbor_mortar_data().has_value()); + } + + CHECK(get_output(*mortar_data) == expected_output); +} + +template +void assign_with_reference(const gsl::not_null*> mortar_data, + TimeStepId time_step_id, Mesh local_mesh, + std::optional local_data, + Mesh neighbor_mesh, + std::optional neighbor_data, + const std::string& expected_output) { + mortar_data->time_step_id() = time_step_id; + + if (local_data.has_value()) { + mortar_data->local_mortar_data() = std::pair{local_mesh, *local_data}; + + CHECK(mortar_data->local_mortar_data().has_value()); + CHECK_FALSE(mortar_data->neighbor_mortar_data().has_value()); + } + + if (neighbor_data.has_value()) { + mortar_data->neighbor_mortar_data() = + std::pair{neighbor_mesh, *neighbor_data}; + CHECK(mortar_data->local_mortar_data().has_value()); + CHECK(mortar_data->neighbor_mortar_data().has_value()); + } + + CHECK(get_output(*mortar_data) == expected_output); +} + +template +void check_serialization(const gsl::not_null*> mortar_data) { + const auto deserialized_mortar_data = 
serialize_and_deserialize(*mortar_data); + + CHECK(*mortar_data->local_mortar_data() == + *deserialized_mortar_data.local_mortar_data()); + CHECK(*mortar_data->neighbor_mortar_data() == + *deserialized_mortar_data.neighbor_mortar_data()); + + CHECK(*mortar_data == deserialized_mortar_data); + CHECK_FALSE(*mortar_data != deserialized_mortar_data); + + const auto extracted_data = mortar_data->extract(); + CHECK(extracted_data.first == *deserialized_mortar_data.local_mortar_data()); + CHECK(extracted_data.second == + *deserialized_mortar_data.neighbor_mortar_data()); +} + template void test_global_time_stepping_usage() { std::uniform_real_distribution dist(-1.0, 2.3); @@ -51,41 +120,32 @@ void test_global_time_stepping_usage() { fill_with_random_values(make_not_null(&local_data), make_not_null(&gen), make_not_null(&dist)); - CHECK_FALSE(mortar_data.local_mortar_data().has_value()); - CHECK_FALSE(mortar_data.neighbor_mortar_data().has_value()); - - mortar_data.insert_local_mortar_data(time_step_id, local_mesh, local_data); - - CHECK(mortar_data.local_mortar_data().has_value()); - CHECK_FALSE(mortar_data.neighbor_mortar_data().has_value()); - - mortar_data.insert_neighbor_mortar_data(time_step_id, neighbor_mesh, - neighbor_data); - CHECK(mortar_data.local_mortar_data().has_value()); - CHECK(mortar_data.neighbor_mortar_data().has_value()); - std::string expected_output = MakeString{} << "TimeStepId: " << time_step_id << "\n" << "LocalMortarData: (" << local_mesh << ", " << local_data << ")\n" << "NeighborMortarData: (" << neighbor_mesh << ", " << neighbor_data << ")\n"; - CHECK(get_output(mortar_data) == expected_output); - const auto deserialized_mortar_data = serialize_and_deserialize(mortar_data); + CHECK_FALSE(mortar_data.local_mortar_data().has_value()); + CHECK_FALSE(mortar_data.neighbor_mortar_data().has_value()); - CHECK(*mortar_data.local_mortar_data() == - *deserialized_mortar_data.local_mortar_data()); - CHECK(*mortar_data.neighbor_mortar_data() == - *deserialized_mortar_data.neighbor_mortar_data()); + // First use the insert functions + assign_with_insert(make_not_null(&mortar_data), time_step_id, local_mesh, + std::optional{local_data}, neighbor_mesh, + std::optional{neighbor_data}, expected_output); - CHECK(mortar_data == deserialized_mortar_data); - CHECK_FALSE(mortar_data != deserialized_mortar_data); + check_serialization(make_not_null(&mortar_data)); - const auto extracted_data = mortar_data.extract(); - CHECK(extracted_data.first == *deserialized_mortar_data.local_mortar_data()); - CHECK(extracted_data.second == - *deserialized_mortar_data.neighbor_mortar_data()); + mortar_data = MortarData{}; + + // Then assign things with the local_mortar_data() non-const reference + // functions + assign_with_reference(make_not_null(&mortar_data), time_step_id, local_mesh, + std::optional{local_data}, neighbor_mesh, + std::optional{neighbor_data}, expected_output); + + check_serialization(make_not_null(&mortar_data)); } template @@ -109,13 +169,21 @@ void test_local_time_stepping_usage(const bool use_gauss_points) { fill_with_random_values(make_not_null(&local_data), make_not_null(&gen), make_not_null(&dist)); - CHECK_FALSE(mortar_data.local_mortar_data().has_value()); - CHECK_FALSE(mortar_data.neighbor_mortar_data().has_value()); + std::string expected_output = MakeString{} + << "TimeStepId: " << time_step_id << "\n" + << "LocalMortarData: (" << local_mesh << ", " + << local_data << ")\n" + << "NeighborMortarData: --\n"; - mortar_data.insert_local_mortar_data(time_step_id, local_mesh, 
local_data); + assign_with_insert(make_not_null(&mortar_data), time_step_id, local_mesh, + std::optional{local_data}, local_mesh, std::nullopt, + expected_output); - CHECK(mortar_data.local_mortar_data().has_value()); - CHECK_FALSE(mortar_data.neighbor_mortar_data().has_value()); + mortar_data = MortarData{}; + + assign_with_reference(make_not_null(&mortar_data), time_step_id, local_mesh, + std::optional{local_data}, local_mesh, std::nullopt, + expected_output); const auto local_volume_det_inv_jacobian = make_with_random_values>( @@ -162,13 +230,8 @@ void test_local_time_stepping_usage(const bool use_gauss_points) { check_geometric_quantities(mortar_data); - std::string expected_output = MakeString{} - << "TimeStepId: " << time_step_id << "\n" - << "LocalMortarData: (" << local_mesh << ", " - << local_data << ")\n" - << "NeighborMortarData: --\n"; - CHECK(get_output(mortar_data) == expected_output); - + // We don't use the check_serialization function from above because we didn't + // insert neighbor data here const auto deserialized_mortar_data = serialize_and_deserialize(mortar_data); CHECK(*mortar_data.local_mortar_data() == @@ -186,7 +249,6 @@ void test() { test_local_time_stepping_usage(true); test_local_time_stepping_usage(false); } - } // namespace SPECTRE_TEST_CASE("Unit.Evolution.DG.MortarData", "[Unit][Evolution]") { From d14ca5f384a854b54ebfc8f65d5ab43338329ac0 Mon Sep 17 00:00:00 2001 From: Kyle Nelli Date: Wed, 15 Feb 2023 16:17:57 -0800 Subject: [PATCH 3/5] Add not_null version of project_to/from_mortar --- .../DiscontinuousGalerkin/MortarHelpers.hpp | 47 ++++++++++++++++--- 1 file changed, 40 insertions(+), 7 deletions(-) diff --git a/src/NumericalAlgorithms/DiscontinuousGalerkin/MortarHelpers.hpp b/src/NumericalAlgorithms/DiscontinuousGalerkin/MortarHelpers.hpp index 1d38d470cd9e..dc8a472e7f8c 100644 --- a/src/NumericalAlgorithms/DiscontinuousGalerkin/MortarHelpers.hpp +++ b/src/NumericalAlgorithms/DiscontinuousGalerkin/MortarHelpers.hpp @@ -67,20 +67,52 @@ MortarSize mortar_size(const ElementId& self, size_t dimension, const OrientationMap& orientation); +/// @{ /// \ingroup DiscontinuousGalerkinGroup /// Project variables from a face to a mortar. +template +void project_to_mortar(const gsl::not_null*> result, + const Variables& vars, const Mesh& face_mesh, + const Mesh& mortar_mesh, + const MortarSize& mortar_size) { + const auto projection_matrices = Spectral::projection_matrix_parent_to_child( + face_mesh, mortar_mesh, mortar_size); + // We don't add an ASSERT about sizes here because there's already one in + // apply_matrices + apply_matrices(result, projection_matrices, vars, face_mesh.extents()); +} + template Variables project_to_mortar(const Variables& vars, const Mesh& face_mesh, const Mesh& mortar_mesh, const MortarSize& mortar_size) { - const auto projection_matrices = Spectral::projection_matrix_parent_to_child( - face_mesh, mortar_mesh, mortar_size); - return apply_matrices(projection_matrices, vars, face_mesh.extents()); + Variables result{mortar_mesh.number_of_grid_points()}; + project_to_mortar(make_not_null(&result), vars, face_mesh, mortar_mesh, + mortar_size); + return result; } +/// @} +/// @{ /// \ingroup DiscontinuousGalerkinGroup /// Project variables from a mortar to a face. 
+template +void project_from_mortar(const gsl::not_null*> result, + const Variables& vars, + const Mesh& face_mesh, + const Mesh& mortar_mesh, + const MortarSize& mortar_size) { + ASSERT(Spectral::needs_projection(face_mesh, mortar_mesh, mortar_size), + "project_from_mortar should not be called if the interface mesh and " + "mortar mesh are identical. Please elide the copy instead."); + const auto projection_matrices = Spectral::projection_matrix_child_to_parent( + mortar_mesh, face_mesh, mortar_size); + // We don't add an ASSERT about sizes here because there's already one in + // apply_matrices + apply_matrices(result, projection_matrices, vars, mortar_mesh.extents()); +} + template Variables project_from_mortar(const Variables& vars, const Mesh& face_mesh, @@ -89,9 +121,10 @@ Variables project_from_mortar(const Variables& vars, ASSERT(Spectral::needs_projection(face_mesh, mortar_mesh, mortar_size), "project_from_mortar should not be called if the interface mesh and " "mortar mesh are identical. Please elide the copy instead."); - const auto projection_matrices = Spectral::projection_matrix_child_to_parent( - mortar_mesh, face_mesh, mortar_size); - return apply_matrices(projection_matrices, vars, mortar_mesh.extents()); + Variables result{face_mesh.number_of_grid_points()}; + project_from_mortar(make_not_null(&result), vars, face_mesh, mortar_mesh, + mortar_size); + return result; } - +/// @} } // namespace dg From 7456bf293f4da243972cd424d5cbdb5084697783 Mon Sep 17 00:00:00 2001 From: Kyle Nelli Date: Wed, 15 Feb 2023 16:41:47 -0800 Subject: [PATCH 4/5] Use not_null version of project_to_mortar in MortarDataImpl --- .../Actions/InternalMortarDataImpl.hpp | 12 ++++-------- 1 file changed, 4 insertions(+), 8 deletions(-) diff --git a/src/Evolution/DiscontinuousGalerkin/Actions/InternalMortarDataImpl.hpp b/src/Evolution/DiscontinuousGalerkin/Actions/InternalMortarDataImpl.hpp index 0b36737c4430..9c611f9cb095 100644 --- a/src/Evolution/DiscontinuousGalerkin/Actions/InternalMortarDataImpl.hpp +++ b/src/Evolution/DiscontinuousGalerkin/Actions/InternalMortarDataImpl.hpp @@ -263,14 +263,10 @@ void internal_mortar_data_impl( Variables::number_of_independent_components}; Variables projected_packaged_data{ boundary_data_on_mortar.data(), boundary_data_on_mortar.size()}; - // Since projected_packaged_data is non-owning, if we tried to - // move-assign it with the result of ::dg::project_to_mortar, it would - // then become owning and the buffer it previously pointed to wouldn't - // be filled (and we want it to be filled). So instead force a - // copy-assign by having an intermediary, data_to_copy. 
- const auto data_to_copy = ::dg::project_to_mortar( - packaged_data, face_mesh, mortar_mesh, mortar_size); - projected_packaged_data = data_to_copy; + + ::dg::project_to_mortar(make_not_null(&projected_packaged_data), + packaged_data, face_mesh, mortar_mesh, + mortar_size); } else { ASSERT(neighbors_in_direction.size() == 1, From 7840d9d9e79723b5a4fb3cf6bc04c03919589447 Mon Sep 17 00:00:00 2001 From: Kyle Nelli Date: Thu, 16 Feb 2023 14:54:24 -0800 Subject: [PATCH 5/5] Use buffers for projections in InternalMortarDataImpl --- .../Actions/ComputeTimeDerivative.hpp | 29 ++++- .../Actions/InternalMortarDataImpl.hpp | 123 +++++++++++++----- 2 files changed, 116 insertions(+), 36 deletions(-) diff --git a/src/Evolution/DiscontinuousGalerkin/Actions/ComputeTimeDerivative.hpp b/src/Evolution/DiscontinuousGalerkin/Actions/ComputeTimeDerivative.hpp index 7e18a94bcbca..516dd3123a69 100644 --- a/src/Evolution/DiscontinuousGalerkin/Actions/ComputeTimeDerivative.hpp +++ b/src/Evolution/DiscontinuousGalerkin/Actions/ComputeTimeDerivative.hpp @@ -76,6 +76,10 @@ template struct get_dg_package_temporary_tags { using type = typename T::dg_package_data_temporary_tags; }; +template +struct get_dg_package_field_tags { + using type = typename T::dg_package_field_tags; +}; template struct get_primitive_tags_for_face { using type = typename get_primitive_vars< @@ -420,6 +424,12 @@ ComputeTimeDerivative:: tmpl::list>, detail::OneOverNormalVectorMagnitude, detail::NormalVector>>>; + // To avoid additional allocations in internal_mortar_data, we provide a + // buffer used to compute the packaged data before it has to be projected to + // the mortar. We get all mortar tags for similar reasons as described above + using all_mortar_tags = tmpl::remove_duplicates>>>; // We also don't use the number of volume mesh grid points. We instead use the // max number of grid points from each face. That way, our allocation will be @@ -458,6 +468,7 @@ ComputeTimeDerivative:: ::Tags::div, db::wrap_tags_in<::Tags::Flux, flux_variables, tmpl::size_t, Frame::Inertial>>>; using VarsFaceTemporaries = Variables; + using DgPackagedDataVarsOnFace = Variables; const size_t number_of_grid_points = mesh.number_of_grid_points(); auto buffer = cpp20::make_unique_for_overwrite( (VarsTemporaries::number_of_independent_components + @@ -467,7 +478,8 @@ ComputeTimeDerivative:: number_of_grid_points + // Different number of grid points. See explanation above where // num_face_temporary_grid_points is defined - VarsFaceTemporaries::number_of_independent_components * + (VarsFaceTemporaries::number_of_independent_components + + DgPackagedDataVarsOnFace::number_of_independent_components) * num_face_temporary_grid_points); VarsTemporaries temporaries{ &buffer[0], VarsTemporaries::number_of_independent_components * @@ -500,6 +512,18 @@ ComputeTimeDerivative:: // num_face_temporary_grid_points is defined VarsFaceTemporaries::number_of_independent_components * num_face_temporary_grid_points); + gsl::span packaged_data_buffer = gsl::make_span( + &buffer[(VarsTemporaries::number_of_independent_components + + VarsFluxes::number_of_independent_components + + VarsPartialDerivatives::number_of_independent_components + + VarsDivFluxes::number_of_independent_components) * + number_of_grid_points + + VarsFaceTemporaries::number_of_independent_components * + num_face_temporary_grid_points], + // Different number of grid points. 
See explanation above where + // num_face_temporary_grid_points is defined + DgPackagedDataVarsOnFace::number_of_independent_components * + num_face_temporary_grid_points); const Scalar* det_inverse_jacobian = nullptr; if constexpr (tmpl::size::value != 0) { @@ -549,7 +573,7 @@ ComputeTimeDerivative:: "final."); tmpl::for_each( [&boundary_correction, &box, &partial_derivs, &primitive_vars, - &temporaries, &volume_fluxes, + &temporaries, &volume_fluxes, &packaged_data_buffer, &face_temporaries](auto derived_correction_v) { using DerivedCorrection = tmpl::type_from; @@ -562,6 +586,7 @@ ComputeTimeDerivative:: // - evolution::dg::Tags::MortarData detail::internal_mortar_data( make_not_null(&box), make_not_null(&face_temporaries), + make_not_null(&packaged_data_buffer), dynamic_cast(boundary_correction), db::get(box), volume_fluxes, temporaries, primitive_vars, diff --git a/src/Evolution/DiscontinuousGalerkin/Actions/InternalMortarDataImpl.hpp b/src/Evolution/DiscontinuousGalerkin/Actions/InternalMortarDataImpl.hpp index 9c611f9cb095..365d53ac4bf9 100644 --- a/src/Evolution/DiscontinuousGalerkin/Actions/InternalMortarDataImpl.hpp +++ b/src/Evolution/DiscontinuousGalerkin/Actions/InternalMortarDataImpl.hpp @@ -52,6 +52,7 @@ void internal_mortar_data_impl( boost::hash, ElementId>>>*> mortar_data_ptr, const gsl::not_null*> face_temporaries, + const gsl::not_null*> packaged_data_buffer, const BoundaryCorrection& boundary_correction, const Variables& volume_evolved_vars, @@ -97,8 +98,6 @@ void internal_mortar_data_impl( FieldsOnFace fields_on_face{}; std::optional> face_mesh_velocity{}; for (const auto& [direction, neighbors_in_direction] : element.neighbors()) { - (void)neighbors_in_direction; // unused variable - const Mesh face_mesh = volume_mesh.slice_away(direction.dimension()); @@ -230,53 +229,108 @@ void internal_mortar_data_impl( "have been. Direction: " << direction); - // We point the Variables inside the DataVector because the DataVector - // will be moved later on so we want it owning its data - DataVector packaged_data_buffer{ + const size_t total_face_size = face_mesh.number_of_grid_points() * - Variables::number_of_independent_components}; - Variables packaged_data{packaged_data_buffer.data(), - packaged_data_buffer.size()}; + Variables::number_of_independent_components; + Variables packaged_data{}; + + // This is the case where we only have one neighbor in this direction, so we + // may or may not have to do any projection. If we don't have to do + // projection, then we can use the local_mortar_data itself to calculate the + // dg_package_data. However, if we need to project, then we have to use the + // packaged_data_buffer that was passed in.
+ if (neighbors_in_direction.size() == 1) { + const auto& neighbor = *neighbors_in_direction.begin(); + const auto mortar_id = std::pair{direction, neighbor}; + const auto& mortar_mesh = mortar_meshes.at(mortar_id); + const auto& mortar_size = mortar_sizes.at(mortar_id); - const double max_abs_char_speed_on_face = detail::dg_package_data( + // Have to use packaged_data_buffer + if (Spectral::needs_projection(face_mesh, mortar_mesh, mortar_size)) { + // The face mesh will be assigned below along with ensuring the size of + // the mortar data is correct + packaged_data.set_data_ref(packaged_data_buffer->data(), + total_face_size); + } else { + // Can use the local_mortar_data + auto& local_mortar_data_opt = + mortar_data_ptr->at(mortar_id).local_mortar_data(); + // If this isn't the first time, set the face mesh + if (LIKELY(local_mortar_data_opt.has_value())) { + local_mortar_data_opt->first = face_mesh; + } else { + // Otherwise we need to initialize the pair. If we don't do this, then + // the DataVector will be non-owning which we don't want + local_mortar_data_opt = + std::optional{std::pair{face_mesh, DataVector{}}}; + } + + // Always set the time step ID + mortar_data_ptr->at(mortar_id).time_step_id() = temporal_id; + + DataVector& local_mortar_data = local_mortar_data_opt->second; + + // Do a destructive resize to account for potential p-refinement + local_mortar_data.destructive_resize(total_face_size); + + packaged_data.set_data_ref(local_mortar_data.data(), + local_mortar_data.size()); + } + } else { + // In this case, we have multiple neighbors in this direction so all will + // need to project their data which means we use the + // packaged_data_buffer to calculate the dg_package_data + packaged_data.set_data_ref(packaged_data_buffer->data(), total_face_size); + } + + detail::dg_package_data( make_not_null(&packaged_data), boundary_correction, fields_on_face, get>( *normal_covector_and_magnitude_ptr->at(direction)), face_mesh_velocity, dg_package_data_projected_tags{}, package_data_volume_args...); - (void)max_abs_char_speed_on_face; // Perform step 3 + // This will only do something if + // a) we have multiple neighbors in this direction + // or + // b) the one (and only) neighbor in this direction needed projection for (const auto& neighbor : neighbors_in_direction) { const auto mortar_id = std::make_pair(direction, neighbor); const auto& mortar_mesh = mortar_meshes.at(mortar_id); const auto& mortar_size = mortar_sizes.at(mortar_id); - // Project the data from the face to the mortar if necessary. - // We can't move the data or modify it in-place when projecting, because - // in that case the face may touch two mortars so we need to keep the - // data around. 
- DataVector boundary_data_on_mortar{}; if (Spectral::needs_projection(face_mesh, mortar_mesh, mortar_size)) { - boundary_data_on_mortar = DataVector{ + auto& local_mortar_data_opt = + mortar_data_ptr->at(mortar_id).local_mortar_data(); + + // If this isn't the first time, set the face mesh + if (LIKELY(local_mortar_data_opt.has_value())) { + local_mortar_data_opt->first = face_mesh; + } else { + // If we don't do this, then the DataVector will be non-owning which + // we don't want + local_mortar_data_opt = + std::optional{std::pair{face_mesh, DataVector{}}}; + } + + // Set the time id since above we only set it for cases that didn't need + // projection + mortar_data_ptr->at(mortar_id).time_step_id() = temporal_id; + + DataVector& local_mortar_data = local_mortar_data_opt->second; + + // Do a destructive resize to account for potential p-refinement + local_mortar_data.destructive_resize( mortar_mesh.number_of_grid_points() * - Variables::number_of_independent_components}; - Variables projected_packaged_data{ - boundary_data_on_mortar.data(), boundary_data_on_mortar.size()}; + Variables::number_of_independent_components); + Variables projected_packaged_data{ + local_mortar_data.data(), local_mortar_data.size()}; ::dg::project_to_mortar(make_not_null(&projected_packaged_data), packaged_data, face_mesh, mortar_mesh, mortar_size); - - } else { - ASSERT(neighbors_in_direction.size() == 1, - "Expected only 1 neighbor in the local direction but got " - << neighbors_in_direction.size() << "instead"); - boundary_data_on_mortar = std::move(packaged_data_buffer); } - - mortar_data_ptr->at(mortar_id).insert_local_mortar_data( - temporal_id, face_mesh, std::move(boundary_data_on_mortar)); } } } @@ -286,6 +340,7 @@ template *> box, const gsl::not_null*> face_temporaries, + const gsl::not_null*> packaged_data_buffer, const BoundaryCorrection& boundary_correction, const Variables evolved_variables, @@ -301,7 +356,7 @@ void internal_mortar_data( db::mutate, evolution::dg::Tags::MortarData>( box, - [&boundary_correction, &face_temporaries, + [&boundary_correction, &face_temporaries, &packaged_data_buffer, &element = db::get>(*box), &evolved_variables, &logical_to_inertial_inverse_jacobian = db::get( normal_covector_and_magnitude_ptr, mortar_data_ptr, - face_temporaries, boundary_correction, evolved_variables, - volume_fluxes, temporaries, primitive_vars, element, mesh, - mortar_meshes, mortar_sizes, time_step_id, moving_mesh_map, - mesh_velocity, logical_to_inertial_inverse_jacobian, - package_data_volume_args...); + face_temporaries, packaged_data_buffer, boundary_correction, + evolved_variables, volume_fluxes, temporaries, primitive_vars, + element, mesh, mortar_meshes, mortar_sizes, time_step_id, + moving_mesh_map, mesh_velocity, + logical_to_inertial_inverse_jacobian, package_data_volume_args...); }, db::get(*box)...); }
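Taken together, patches 2/5 through 5/5 let the local mortar data be written in place: the new non-const `time_step_id()` and `local_mortar_data()` accessors expose the stored pair, and the `not_null` overload of `::dg::project_to_mortar` projects directly into that storage instead of returning a temporary. A minimal usage sketch follows, with the caveats that the helper name, the `PackageTags` tag list, the include paths, and the `::dg::MortarSize` alias spelling are assumptions, and that (unlike the patch) it always projects rather than short-circuiting when the face and mortar meshes coincide.

```cpp
// Sketch: reuse the MortarData-owned buffer across steps by assigning through
// the new non-const accessors and projecting straight into it.
#include <cstddef>
#include <utility>

#include "DataStructures/DataVector.hpp"
#include "DataStructures/Variables.hpp"
#include "Evolution/DiscontinuousGalerkin/MortarData.hpp"
#include "NumericalAlgorithms/DiscontinuousGalerkin/MortarHelpers.hpp"
#include "NumericalAlgorithms/Spectral/Mesh.hpp"
#include "Time/TimeStepId.hpp"
#include "Utilities/Gsl.hpp"

template <size_t Dim, typename PackageTags>
void store_projected_local_mortar_data(
    const gsl::not_null<evolution::dg::MortarData<Dim>*> mortar_data,
    const TimeStepId& temporal_id, const Mesh<Dim - 1>& face_mesh,
    const Mesh<Dim - 1>& mortar_mesh,
    const ::dg::MortarSize<Dim - 1>& mortar_size,
    const Variables<PackageTags>& packaged_data_on_face) {
  // Assign through the non-const references instead of
  // insert_local_mortar_data so an existing allocation can be reused.
  mortar_data->time_step_id() = temporal_id;
  auto& local_data = mortar_data->local_mortar_data();
  if (local_data.has_value()) {
    local_data->first = face_mesh;
  } else {
    // First use: initialize the pair so the DataVector below is owning.
    local_data = std::pair{face_mesh, DataVector{}};
  }
  // Size the buffer for the mortar (a no-op if the size is unchanged).
  local_data->second.destructive_resize(
      mortar_mesh.number_of_grid_points() *
      Variables<PackageTags>::number_of_independent_components);

  // View the buffer as Variables and project the face data directly into it.
  Variables<PackageTags> projected_data{local_data->second.data(),
                                        local_data->second.size()};
  ::dg::project_to_mortar(make_not_null(&projected_data),
                          packaged_data_on_face, face_mesh, mortar_mesh,
                          mortar_size);
}
```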