Merge pull request #13961 from kronbichler/fix_petsc_complex_tests_2
Fix more PETSc complex tests
tamiko committed Jun 13, 2022
2 parents 0aba222 + c598c25 commit 9c96714
Showing 5 changed files with 21 additions and 24 deletions.
13 changes: 7 additions & 6 deletions tests/simplex/step-17.cc
@@ -311,12 +311,13 @@ namespace Step17
     system_matrix.compress(VectorOperation::add);
     system_rhs.compress(VectorOperation::add);
 
-    std::map<types::global_dof_index, double> boundary_values;
-    VectorTools::interpolate_boundary_values(mapping,
-                                             dof_handler,
-                                             0,
-                                             Functions::ZeroFunction<dim>(dim),
-                                             boundary_values);
+    std::map<types::global_dof_index, PetscScalar> boundary_values;
+    VectorTools::interpolate_boundary_values(
+      mapping,
+      dof_handler,
+      0,
+      Functions::ZeroFunction<dim, PetscScalar>(dim),
+      boundary_values);
     MatrixTools::apply_boundary_values(
       boundary_values, system_matrix, solution, system_rhs, false);
   }
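Why the scalar type changes: deal.II's PETSc wrappers store their entries as PetscScalar, and in a PETSc build configured for complex scalars that type is std::complex&lt;double&gt; rather than double. A boundary-value map keyed on plain double (and a ZeroFunction producing double values) then no longer matches the system matrix and right-hand side, so the test now spells out PetscScalar. A minimal standalone sketch of that type relationship, assuming a C++ translation unit and PETSc's own PETSC_USE_COMPLEX configuration macro (nothing below is taken from the deal.II sources):

// Sketch: the same source line must compile against either a real- or a
// complex-valued PETSc installation, which is what PetscScalar provides.
#include <petscsys.h> // declares PetscScalar and PETSC_USE_COMPLEX
#include <complex>
#include <map>

int main()
{
  // With a real PETSc build PetscScalar is double; with a complex build it
  // is std::complex<double>. Keying the map on PetscScalar keeps the
  // boundary values compatible with the PETSc matrix and vector entries.
  std::map<unsigned int, PetscScalar> boundary_values;
  boundary_values[0] = PetscScalar(0.0);
#ifdef PETSC_USE_COMPLEX
  boundary_values[1] = PetscScalar(0.0, 1.0); // real and imaginary parts
#endif
  return 0;
}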
15 changes: 8 additions & 7 deletions tests/simplex/step-18.cc
@@ -639,13 +639,14 @@ namespace Step18
 #endif
 
 
-    FEValuesExtractors::Scalar z_component(dim - 1);
-    std::map<types::global_dof_index, double> boundary_values;
-    VectorTools::interpolate_boundary_values(mapping,
-                                             dof_handler,
-                                             0,
-                                             Functions::ZeroFunction<dim>(dim),
-                                             boundary_values);
+    FEValuesExtractors::Scalar z_component(dim - 1);
+    std::map<types::global_dof_index, PetscScalar> boundary_values;
+    VectorTools::interpolate_boundary_values(
+      mapping,
+      dof_handler,
+      0,
+      Functions::ZeroFunction<dim, PetscScalar>(dim),
+      boundary_values);
     VectorTools::interpolate_boundary_values(
       mapping,
       dof_handler,
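Same fix as in step-17: both the mapped type of boundary_values and the value type of the boundary function must agree on the scalar. Functions::ZeroFunction takes that scalar as its second template parameter and defaults to double, which is why the call now states PetscScalar explicitly. A small generic sketch of that kind of interface, with illustrative names that are not deal.II's API:

// Sketch: a boundary-value routine whose scalar type is deduced from both
// the function object and the output map; mixing double and complex values
// would not compile, hence the explicit scalar template argument above.
#include <complex>
#include <map>

template <typename Number>
struct ZeroBoundaryFunction
{
  Number value(unsigned int /*component*/ = 0) const
  {
    return Number(); // zero of the requested scalar type
  }
};

template <typename Number>
void interpolate_boundary_values(const ZeroBoundaryFunction<Number> &function,
                                 std::map<unsigned int, Number>     &values)
{
  for (unsigned int dof = 0; dof < 4; ++dof) // pretend there are 4 boundary DoFs
    values[dof] = function.value();
}

int main()
{
  using Scalar = std::complex<double>; // stands in for PetscScalar in a complex build
  std::map<unsigned int, Scalar> boundary_values;
  interpolate_boundary_values(ZeroBoundaryFunction<Scalar>(), boundary_values);
  return 0;
}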
9 changes: 2 additions & 7 deletions tests/simplex/step-40.cc
@@ -288,7 +288,7 @@ namespace Step40
                                            system_rhs,
                                            preconditioner),
                               solver_control.last_step(),
-                              5,
+                              1,
                               9);
 
     constraints.distribute(completely_distributed_solution);
@@ -340,12 +340,7 @@
   void
   LaplaceProblem<dim>::run()
   {
-    deallog << "Running with "
-#ifdef USE_PETSC_LA
-            << "PETSc"
-#else
-            << "Trilinos"
-#endif
+    deallog << "Running "
             << " on " << Utilities::MPI::n_mpi_processes(mpi_communicator)
             << " MPI rank(s)..." << std::endl;
 
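Two separate adjustments here: the accepted iteration window for the solver is widened from 5-9 to 1-9, and the run() banner no longer names the linear-algebra backend. The test does not log the exact iteration count; it only reports that the count fell inside the allowed range, which keeps the expected output below independent of the exact count. A sketch of that range-check pattern with an illustrative helper (this is not the deal.II test-suite macro that serves the same purpose):

// Sketch: report only that the iteration count lies in an accepted window,
// so runs whose exact counts differ still produce identical log output.
#include <iostream>

void check_solver_within_range(const unsigned int steps,
                               const unsigned int min_allowed,
                               const unsigned int max_allowed)
{
  if (steps >= min_allowed && steps <= max_allowed)
    std::cout << "Solver stopped within " << min_allowed << " - "
              << max_allowed << " iterations" << std::endl;
  else
    std::cout << "Solver stopped after " << steps << " iterations" << std::endl;
}

int main()
{
  check_solver_within_range(7, 1, 9); // prints the fixed in-range message
  return 0;
}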
4 changes: 2 additions & 2 deletions tests/simplex/step-40.mpirun=1.with_petsc=true.output
@@ -1,6 +1,6 @@
 
-DEAL::Running with PETSc on 1 MPI rank(s)...
+DEAL::Running on 1 MPI rank(s)...
 DEAL:: Number of active cells: 512
 DEAL:: Number of degrees of freedom: 1089
-DEAL::Solver stopped within 5 - 9 iterations
+DEAL::Solver stopped within 1 - 9 iterations
 DEAL::0.0124965
4 changes: 2 additions & 2 deletions (second step-40 output file, for 4 MPI ranks)
@@ -1,6 +1,6 @@
 
-DEAL::Running with PETSc on 4 MPI rank(s)...
+DEAL::Running on 4 MPI rank(s)...
 DEAL:: Number of active cells: 512
 DEAL:: Number of degrees of freedom: 1089
-DEAL::Solver stopped within 5 - 9 iterations
+DEAL::Solver stopped within 1 - 9 iterations
 DEAL::0.0124965
