2 changes: 1 addition & 1 deletion Crystal_Growth_Phase_Field_Model/solve.cpp
@@ -13,7 +13,7 @@
 void PhaseFieldSolver::solve(){
   //Using a direct parallel solver
   SolverControl cn;
-  PETScWrappers::SparseDirectMUMPS A_direct(cn, mpi_communicator);
+  PETScWrappers::SparseDirectMUMPS A_direct(cn);
   A_direct.solve(jacobian_matrix, solution_update, system_rhs);
   //Updating the solution by adding the delta solution
   conv_solution.add(1, solution_update);
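The change in every file of this PR follows the same pattern: the MPI communicator argument is dropped from the PETScWrappers solver constructors, presumably because the solvers now obtain the communicator from the matrix and vectors handed to solve(). A minimal sketch of the updated direct-solver usage, assuming deal.II built with PETSc support; the helper name and its matrix/vector arguments are hypothetical:

#include <deal.II/lac/petsc_solver.h>
#include <deal.II/lac/petsc_sparse_matrix.h>
#include <deal.II/lac/petsc_vector.h>
#include <deal.II/lac/solver_control.h>

using namespace dealii;

// Hypothetical helper: SparseDirectMUMPS is now constructed from a
// SolverControl alone; the parallel layout comes from the matrix passed
// to solve() rather than from an explicit constructor argument.
void solve_direct(const PETScWrappers::MPI::SparseMatrix &jacobian_matrix,
                  PETScWrappers::MPI::Vector             &solution_update,
                  const PETScWrappers::MPI::Vector       &system_rhs)
{
  SolverControl cn;
  PETScWrappers::SparseDirectMUMPS A_direct(cn);
  A_direct.solve(jacobian_matrix, solution_update, system_rhs);
}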
9 changes: 3 additions & 6 deletions time_dependent_navier_stokes/time_dependent_navier_stokes.cc
@@ -493,8 +493,7 @@ namespace fluid
   TimerOutput::Scope timer_section(timer, "CG for Mp");
   SolverControl mp_control(src.block(1).size(),
                            1e-6 * src.block(1).l2_norm());
-  PETScWrappers::SolverCG cg_mp(mp_control,
-                                mass_schur->get_mpi_communicator());
+  PETScWrappers::SolverCG cg_mp(mp_control);
   // $-(\nu + \gamma)M_p^{-1}v_1$
   PETScWrappers::PreconditionBlockJacobi Mp_preconditioner;
   Mp_preconditioner.initialize(mass_matrix->block(1, 1));
@@ -507,8 +506,7 @@ namespace fluid
   TimerOutput::Scope timer_section(timer, "CG for Sm");
   SolverControl sm_control(src.block(1).size(),
                            1e-6 * src.block(1).l2_norm());
-  PETScWrappers::SolverCG cg_sm(sm_control,
-                                mass_schur->get_mpi_communicator());
+  PETScWrappers::SolverCG cg_sm(sm_control);
   // PreconditionBlockJacobi works find on Sm if we do not refine the mesh.
   // Because after refine_mesh is called, zero entries will be created on
   // the diagonal (not sure why), which prevents PreconditionBlockJacobi
@@ -531,8 +529,7 @@ namespace fluid
   TimerOutput::Scope timer_section(timer, "CG for A");
   SolverControl a_control(src.block(0).size(),
                           1e-6 * src.block(0).l2_norm());
-  PETScWrappers::SolverCG cg_a(a_control,
-                               mass_schur->get_mpi_communicator());
+  PETScWrappers::SolverCG cg_a(a_control);
   // We do not use any preconditioner for this block, which is of course
   // slow,
   // only because the performance of the only two preconditioners available
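The three CG solvers above get the same constructor change. A minimal sketch of the updated Krylov-solver pattern, assuming hypothetical arguments of the PETScWrappers::MPI types used in this file:

#include <deal.II/lac/petsc_precondition.h>
#include <deal.II/lac/petsc_solver.h>
#include <deal.II/lac/petsc_sparse_matrix.h>
#include <deal.II/lac/petsc_vector.h>
#include <deal.II/lac/solver_control.h>

using namespace dealii;

// Hypothetical helper: the SolverCG constructor no longer takes a
// communicator; the matrix, right-hand side, and solution carry the
// parallel layout, and the preconditioner is initialized from the matrix
// exactly as in the diff above.
void solve_with_cg(const PETScWrappers::MPI::SparseMatrix &matrix,
                   PETScWrappers::MPI::Vector             &solution,
                   const PETScWrappers::MPI::Vector       &rhs)
{
  SolverControl control(rhs.size(), 1e-6 * rhs.l2_norm());
  PETScWrappers::SolverCG cg(control);
  PETScWrappers::PreconditionBlockJacobi preconditioner;
  preconditioner.initialize(matrix);
  cg.solve(matrix, solution, rhs, preconditioner);
}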
2 changes: 1 addition & 1 deletion two_phase_flow/LevelSetSolver.cc
@@ -1524,7 +1524,7 @@ void LevelSetSolver<dim>::solve(const AffineConstraints<double> &constraints,
 {
   // all vectors are NON-GHOSTED
   SolverControl solver_control (dof_handler_LS.n_dofs(), solver_tolerance);
-  PETScWrappers::SolverCG solver(solver_control, mpi_communicator);
+  PETScWrappers::SolverCG solver(solver_control);
   constraints.distribute (completely_distributed_solution);
   solver.solve (Matrix, completely_distributed_solution, rhs, *preconditioner);
   constraints.distribute (completely_distributed_solution);
12 changes: 6 additions & 6 deletions two_phase_flow/NavierStokesSolver.cc
@@ -1014,10 +1014,10 @@ void NavierStokesSolver<dim>::solve_U(const AffineConstraints<double> &constrain
                                       const PETScWrappers::MPI::Vector &rhs)
 {
   SolverControl solver_control(dof_handler_U.n_dofs(),1e-6);
-  //PETScWrappers::SolverCG solver(solver_control, mpi_communicator);
-  //PETScWrappers::SolverGMRES solver(solver_control, mpi_communicator);
-  //PETScWrappers::SolverChebychev solver(solver_control, mpi_communicator);
-  PETScWrappers::SolverBicgstab solver(solver_control,mpi_communicator);
+  //PETScWrappers::SolverCG solver(solver_control);
+  //PETScWrappers::SolverGMRES solver(solver_control);
+  //PETScWrappers::SolverChebychev solver(solver_control);
+  PETScWrappers::SolverBicgstab solver(solver_control);
   constraints.distribute(completely_distributed_solution);
   solver.solve(Matrix,completely_distributed_solution,rhs,*preconditioner);
   constraints.distribute(completely_distributed_solution);
@@ -1035,8 +1035,8 @@ void NavierStokesSolver<dim>::solve_P(const AffineConstraints<double> &constrain
                                       const PETScWrappers::MPI::Vector &rhs)
 {
   SolverControl solver_control(dof_handler_P.n_dofs(),1e-6);
-  PETScWrappers::SolverCG solver(solver_control,mpi_communicator);
-  //PETScWrappers::SolverGMRES solver(solver_control, mpi_communicator);
+  PETScWrappers::SolverCG solver(solver_control);
+  //PETScWrappers::SolverGMRES solver(solver_control);
   constraints.distribute(completely_distributed_solution);
   solver.solve(Matrix,completely_distributed_solution,rhs,*preconditioner);
   constraints.distribute(completely_distributed_solution);