Make compile with deal.II version 9.6
kronbichler committed Jul 31, 2024
1 parent 6542462 commit 8bdd1b0
Showing 4 changed files with 72 additions and 47 deletions.
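The change pattern is the same in both files shown below: the PETSc-dependent pieces (the cached PETScWrappers::MPI::SparseMatrix, get_system_matrix(), and the MGCoarseSolverAMG coarse-grid solver) are wrapped in DEAL_II_WITH_PETSC guards, with a run-time AssertThrow fallback when the AMG coarse solver is requested in a build without PETSc. A minimal, self-contained sketch of that pattern follows; the class and function names are illustrative, not the benchmark's own.

```cpp
// Sketch of the guard pattern applied in this commit: PETSc-only members and
// code paths compile away when deal.II 9.6 is configured without PETSc.
#include <deal.II/base/config.h>
#include <deal.II/base/exceptions.h>

#ifdef DEAL_II_WITH_PETSC
#  include <deal.II/lac/petsc_sparse_matrix.h>
#endif

using namespace dealii;

class ExampleOperator
{
public:
#ifdef DEAL_II_WITH_PETSC
  // Assembled copy of the operator, used only by the AMG coarse solver.
  const PETScWrappers::MPI::SparseMatrix &
  get_system_matrix() const
  {
    return system_matrix;
  }

private:
  mutable PETScWrappers::MPI::SparseMatrix system_matrix;
#endif
};

void
setup_coarse_solver(const ExampleOperator &op, const bool do_coarse_amg)
{
  (void)op; // the real code passes op.get_system_matrix() to the AMG solver
  if (do_coarse_amg)
    {
#ifdef DEAL_II_WITH_PETSC
      // construct the AMG coarse-grid solver from op.get_system_matrix() ...
#else
      AssertThrow(false, ExcMessage("Cannot use AMG solver without PETSc"));
#endif
    }
}
```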
19 changes: 19 additions & 0 deletions bps5/multigrid_cg.cc
@@ -201,8 +201,10 @@ class LaplaceOperator
return diag;
}

#ifdef DEAL_II_WITH_PETSC
const PETScWrappers::MPI::SparseMatrix &
get_system_matrix() const;
#endif

private:
virtual void
@@ -234,7 +236,10 @@ class LaplaceOperator
void
compute_vertex_coefficients();

#ifdef DEAL_II_WITH_PETSC
mutable PETScWrappers::MPI::SparseMatrix system_matrix;
#endif

AlignedVector<
std::array<Tensor<1, dim, VectorizedArray<number>>, GeometryInfo<dim>::vertices_per_cell>>
cell_vertex_coefficients;
@@ -526,6 +531,8 @@ LaplaceOperator<dim, number>::local_compute_diagonal(



#ifdef DEAL_II_WITH_PETSC

template <int dim, typename number>
const PETScWrappers::MPI::SparseMatrix &
LaplaceOperator<dim, number>::get_system_matrix() const
@@ -566,6 +573,8 @@ LaplaceOperator<dim, number>::get_system_matrix() const
return this->system_matrix;
}

#endif


template <int dim, typename MatrixType>
class MGTransferManual : public MGTransferMatrixFree<dim, typename MatrixType::value_type>
@@ -600,6 +609,8 @@ class MGTransferManual : public MGTransferMatrixFree<dim, typename MatrixType::v



#ifdef DEAL_II_WITH_PETSC

template <typename number>
class MGCoarseSolverAMG : public MGCoarseGridBase<LinearAlgebra::distributed::Vector<number>>
{
@@ -724,6 +735,8 @@ class MGCoarseSolverAMG : public MGCoarseGridBase<LinearAlgebra::distributed::Ve
mutable double compute_time;
};

#endif



namespace Helper
@@ -1579,8 +1592,12 @@ LaplaceProblem<dim>::setup_coarse_solver()
{
if (do_coarse_amg)
{
#ifdef DEAL_II_WITH_PETSC
mg_coarse = std::make_unique<MGCoarseSolverAMG<typename LevelMatrixType::value_type>>(
mg_matrices[0].get_system_matrix(), 1);
#else
AssertThrow(false, ExcMessage("Can use AMG solver without PETSc"));
#endif
}
else
{
@@ -1672,9 +1689,11 @@ LaplaceProblem<dim>::solve()
<< mg_smoother_data[l].preconditioner->get_compute_time_and_reset() << "s ";
}
pcout << std::endl;
#ifdef DEAL_II_WITH_PETSC
if (auto coarse =
dynamic_cast<MGCoarseSolverAMG<typename LevelMatrixType::value_type> *>(mg_coarse.get()))
pcout << " coarse grid " << coarse->get_compute_time_and_reset() << "s" << std::endl;
#endif
pcout << "iterations: " << solver_control.last_step() << std::endl;
const unsigned int n_ranks = Utilities::MPI::n_mpi_processes(MPI_COMM_WORLD);
pcout << "throughput: "
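Both files guard an MGCoarseSolverAMG class that plugs a PETSc-based solve into the multigrid hierarchy through deal.II's MGCoarseGridBase interface. Below is a stripped-down sketch of that interface only; the placeholder class is illustrative, while the real class wraps an AMG-preconditioned PETSc solve and converts between deal.II and PETSc vectors.

```cpp
// Minimal sketch of the MGCoarseGridBase interface that MGCoarseSolverAMG
// implements; the multigrid cycle invokes operator() on the coarsest level.
#include <deal.II/lac/la_parallel_vector.h>
#include <deal.II/multigrid/mg_base.h>

using namespace dealii;

template <typename number>
class PlaceholderCoarseSolver
  : public MGCoarseGridBase<LinearAlgebra::distributed::Vector<number>>
{
public:
  using VectorType = LinearAlgebra::distributed::Vector<number>;

  virtual void
  operator()(const unsigned int /*level*/,
             VectorType        &dst,
             const VectorType  &src) const override
  {
    // A real coarse solver would run an AMG-preconditioned solve here and
    // accumulate its compute time for the timing report printed in solve().
    dst = src;
  }
};
```

In the benchmark the whole class sits inside the #ifdef block, since its data members are PETSc types (see, e.g., the petsc_dst member in the second file's diff), so it cannot even be declared in a build without PETSc.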
9 changes: 8 additions & 1 deletion bps5_e-vector/multigrid_cg.cc
@@ -252,6 +252,7 @@ class LaplaceOperatorEVector : public Poisson::LaplaceOperator<dim, 1, Number>



#ifdef DEAL_II_WITH_PETSC
template <typename number>
class MGCoarseSolverAMG : public MGCoarseGridBase<LinearAlgebra::distributed::Vector<number>>
{
@@ -375,7 +376,7 @@ class MGCoarseSolverAMG : public MGCoarseGridBase<LinearAlgebra::distributed::Ve
mutable VectorTypePETSc petsc_dst;
mutable double compute_time;
};

#endif


template <int dim>
@@ -805,8 +806,12 @@ LaplaceProblem<dim>::setup_coarse_solver()
{
if (do_coarse_amg)
{
#ifdef DEAL_II_WITH_PETSC
mg_coarse = std::make_unique<MGCoarseSolverAMG<typename LevelMatrixType::value_type>>(
mg_matrices[0].get_system_matrix(), 1);
#else
AssertThrow(false, ExcMessage("Cannot use AMG solver without PETSc"));
#endif
}
else
{
@@ -879,9 +884,11 @@ LaplaceProblem<dim>::solve()
pcout << mg_matrices[l].get_compute_time_and_reset() << "s ";
}
pcout << std::endl;
#ifdef DEAL_II_WITH_PETSC
if (auto coarse =
dynamic_cast<MGCoarseSolverAMG<typename LevelMatrixType::value_type> *>(mg_coarse.get()))
pcout << " coarse grid " << coarse->get_compute_time_and_reset() << "s" << std::endl;
#endif
pcout << "iterations: " << solver_control.last_step() << std::endl;
const unsigned int n_ranks = Utilities::MPI::n_mpi_processes(MPI_COMM_WORLD);
pcout << "throughput: "
