Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
14 changes: 14 additions & 0 deletions include/reduced_basis/rb_construction_base.h
Original file line number Diff line number Diff line change
Expand Up @@ -106,6 +106,12 @@ class RBConstructionBase : public Base, public RBParametrized
/**
 * @return the current value of the quiet_mode flag, i.e. whether
 * this object is in "quiet" mode.
 */
bool is_quiet() const
{ return this->quiet_mode; }

/**
* Set the boolean option that indicates whether we normalize
* solution snapshots or not.
*/
void set_normalize_solution_snapshots(bool value);

/**
* Get the number of global training samples.
*/
Expand Down Expand Up @@ -266,6 +272,14 @@ class RBConstructionBase : public Base, public RBParametrized
*/
bool serial_training_set;

/**
* Set this boolean to true if we want to normalize solution snapshots
* used in training to have norm of 1. This is relevant if snapshots
* have differing magnitudes and we want to approximate them all with
* equal accuracy.
*/
bool _normalize_solution_snapshots;

/**
* We keep an extra temporary vector that is useful for
* performing inner products (avoids unnecessary memory
Expand Down
7 changes: 7 additions & 0 deletions include/reduced_basis/rb_eim_construction.h
Original file line number Diff line number Diff line change
Expand Up @@ -164,6 +164,13 @@ class RBEIMConstruction : public RBConstructionBase<System>
*/
virtual void print_info();

/**
* Rescale solution snapshots so that they all have unity norm. This is relevant
* if training samples have differing magnitudes and we want to approximate them
* all with equal accuracy.
*/
void apply_normalization_to_solution_snapshots();

/**
* Generate the EIM approximation for the specified parametrized function
* using either POD or the Greedy Algorithm. Return the final tolerance.
Expand Down
14 changes: 14 additions & 0 deletions src/reduced_basis/rb_construction.C
Original file line number Diff line number Diff line change
Expand Up @@ -1453,6 +1453,20 @@ void RBConstruction::train_reduced_basis_with_POD()
}
libMesh::out << std::endl;

if (_normalize_solution_snapshots)
{
libMesh::out << "Normalizing solution snapshots" << std::endl;
for (unsigned int i=0; i<n_snapshots; i++)
{
get_non_dirichlet_inner_product_matrix_if_avail()->vector_mult(
*inner_product_storage_vector, *POD_snapshots[i]);
Real norm = std::sqrt(std::real(POD_snapshots[i]->dot(*inner_product_storage_vector)));

if (norm > 0.)
POD_snapshots[i]->scale(1./norm);
}
}

// Set up the "correlation matrix"
DenseMatrix<Number> correlation_matrix(n_snapshots,n_snapshots);
for (unsigned int i=0; i<n_snapshots; i++)
Expand Down
7 changes: 7 additions & 0 deletions src/reduced_basis/rb_construction_base.C
Original file line number Diff line number Diff line change
Expand Up @@ -98,6 +98,7 @@ RBConstructionBase<Base>::RBConstructionBase (EquationSystems & es,
: Base(es, name_in, number_in),
quiet_mode(true),
serial_training_set(false),
_normalize_solution_snapshots(false),
_training_parameters_initialized(false),
_first_local_index(0),
_n_local_training_samples(0),
Expand Down Expand Up @@ -142,6 +143,12 @@ void RBConstructionBase<Base>::get_global_max_error_pair(const Parallel::Communi
communicator.broadcast(error_pair.first, proc_ID_index);
}

// Setter for the _normalize_solution_snapshots flag, which controls
// whether solution snapshots are rescaled to unit norm before being
// used in training (the flag is read by the POD/greedy training
// routines).
template <class Base>
void RBConstructionBase<Base>::set_normalize_solution_snapshots(bool value)
{
_normalize_solution_snapshots = value;
}

template <class Base>
numeric_index_type RBConstructionBase<Base>::get_n_training_samples() const
{
Expand Down
87 changes: 54 additions & 33 deletions src/reduced_basis/rb_eim_construction.C
Original file line number Diff line number Diff line change
Expand Up @@ -87,21 +87,6 @@ void add(DataMap & u, const Number k, const DataMap & v)
}
}

// Rescale every value stored in the data map in place, i.e. u <- k*u.
// Each map entry holds a vector-of-vectors of values, all of which are
// multiplied by the factor k.
template <typename DataMap>
void scale(DataMap & u, const Number k)
{
  for (auto & entry : u)
    for (auto & row : entry.second)
      for (auto & val : row)
        val *= k;
}

void add_node_data_map(RBEIMConstruction::NodeDataMap & u, const Number k, const RBEIMConstruction::NodeDataMap & v)
{
for (auto & [key, vec_u] : u)
Expand All @@ -117,18 +102,6 @@ void add_node_data_map(RBEIMConstruction::NodeDataMap & u, const Number k, const
}
}

// Rescale every value stored in the node data map in place, i.e. u <- k*u.
void scale_node_data_map(RBEIMConstruction::NodeDataMap & u, const Number k)
{
  for (auto & [node_id, values] : u)
    {
      libmesh_ignore(node_id);
      for (auto & value : values)
        value *= k;
    }
}

}

RBEIMConstruction::RBEIMConstruction (EquationSystems & es,
Expand Down Expand Up @@ -434,6 +407,9 @@ void RBEIMConstruction::set_rb_construction_parameters(unsigned int n_training_s

Real RBEIMConstruction::train_eim_approximation()
{
if (_normalize_solution_snapshots)
apply_normalization_to_solution_snapshots();

if(best_fit_type_flag == POD_BEST_FIT)
{
train_eim_approximation_with_POD();
Expand Down Expand Up @@ -644,6 +620,51 @@ Real RBEIMConstruction::train_eim_approximation_with_greedy()
return greedy_error;
}

void RBEIMConstruction::apply_normalization_to_solution_snapshots()
{
LOG_SCOPE("apply_normalization_to_solution_snapshots()", "RBEIMConstruction");

libMesh::out << "Normalizing solution snapshots" << std::endl;

bool apply_comp_scaling = !get_rb_eim_evaluation().scale_components_in_enrichment().empty();
unsigned int n_snapshots = get_n_training_samples();
RBEIMEvaluation & rbe = get_rb_eim_evaluation();

for (unsigned int i=0; i<n_snapshots; i++)
{
if (rbe.get_parametrized_function().on_mesh_sides())
{
Real norm_val = std::sqrt(std::real(side_inner_product(
_local_side_parametrized_functions_for_training[i],
_local_side_parametrized_functions_for_training[i],
apply_comp_scaling)));

if (norm_val > 0.)
scale_parametrized_function(_local_side_parametrized_functions_for_training[i], 1./norm_val);
}
else if (rbe.get_parametrized_function().on_mesh_nodes())
{
Real norm_val = std::sqrt(std::real(node_inner_product(
_local_node_parametrized_functions_for_training[i],
_local_node_parametrized_functions_for_training[i],
apply_comp_scaling)));

if (norm_val > 0.)
scale_node_parametrized_function(_local_node_parametrized_functions_for_training[i], 1./norm_val);
}
else
{
Real norm_val = std::sqrt(std::real(inner_product(
_local_parametrized_functions_for_training[i],
_local_parametrized_functions_for_training[i],
apply_comp_scaling)));

if (norm_val > 0.)
scale_parametrized_function(_local_parametrized_functions_for_training[i], 1./norm_val);
}
}
}

Real RBEIMConstruction::train_eim_approximation_with_POD()
{
LOG_SCOPE("train_eim_approximation_with_POD()", "RBEIMConstruction");
Expand Down Expand Up @@ -823,13 +844,13 @@ Real RBEIMConstruction::train_eim_approximation_with_POD()

if (!is_zero_bf)
{
scale(v, 0.);
scale_parametrized_function(v, 0.);

for ( unsigned int i=0; i<n_snapshots; ++i )
add(v, U.el(i, j), _local_side_parametrized_functions_for_training[i] );

Real norm_v = std::sqrt(sigma(j));
scale(v, 1./norm_v);
scale_parametrized_function(v, 1./norm_v);
}

libmesh_try
Expand Down Expand Up @@ -889,13 +910,13 @@ Real RBEIMConstruction::train_eim_approximation_with_POD()

if (!is_zero_bf)
{
scale_node_data_map(v, 0.);
scale_node_parametrized_function(v, 0.);

for ( unsigned int i=0; i<n_snapshots; ++i )
add_node_data_map(v, U.el(i, j), _local_node_parametrized_functions_for_training[i] );

Real norm_v = std::sqrt(sigma(j));
scale_node_data_map(v, 1./norm_v);
scale_node_parametrized_function(v, 1./norm_v);
}

libmesh_try
Expand Down Expand Up @@ -955,13 +976,13 @@ Real RBEIMConstruction::train_eim_approximation_with_POD()

if (!is_zero_bf)
{
scale(v, 0.);
scale_parametrized_function(v, 0.);

for ( unsigned int i=0; i<n_snapshots; ++i )
add(v, U.el(i, j), _local_parametrized_functions_for_training[i] );

Real norm_v = std::sqrt(sigma(j));
scale(v, 1./norm_v);
scale_parametrized_function(v, 1./norm_v);
}

libmesh_try
Expand Down