diff --git a/src/solvers/dgsem_p4est/dg_2d_parallel.jl b/src/solvers/dgsem_p4est/dg_2d_parallel.jl
index d76d5083573..c5c8557bfbd 100644
--- a/src/solvers/dgsem_p4est/dg_2d_parallel.jl
+++ b/src/solvers/dgsem_p4est/dg_2d_parallel.jl
@@ -5,99 +5,6 @@
 @muladd begin
 
 
-# TODO: Unify this function with the `ParallelTreeMesh` `rhs!` once mortar support has been added
-# to the `ParallelP4estMesh`
-function rhs!(du, u, t,
-              mesh::ParallelP4estMesh{2}, equations,
-              initial_condition, boundary_conditions, source_terms,
-              dg::DG, cache)
-  # Start to receive MPI data
-  @trixi_timeit timer() "start MPI receive" start_mpi_receive!(cache.mpi_cache)
-
-  # Prolong solution to MPI interfaces
-  @trixi_timeit timer() "prolong2mpiinterfaces" prolong2mpiinterfaces!(
-    cache, u, mesh, equations, dg.surface_integral, dg)
-
-  # Prolong solution to MPI mortars
-  @trixi_timeit timer() "prolong2mpimortars" prolong2mpimortars!(
-    cache, u, mesh, equations, dg.mortar, dg.surface_integral, dg)
-
-  # Start to send MPI data
-  @trixi_timeit timer() "start MPI send" start_mpi_send!(
-    cache.mpi_cache, mesh, equations, dg, cache)
-
-  # Reset du
-  @trixi_timeit timer() "reset ∂u/∂t" reset_du!(du, dg, cache)
-
-  # Calculate volume integral
-  @trixi_timeit timer() "volume integral" calc_volume_integral!(
-    du, u, mesh,
-    have_nonconservative_terms(equations), equations,
-    dg.volume_integral, dg, cache)
-
-  # Prolong solution to interfaces
-  @trixi_timeit timer() "prolong2interfaces" prolong2interfaces!(
-    cache, u, mesh, equations, dg.surface_integral, dg)
-
-  # Calculate interface fluxes
-  @trixi_timeit timer() "interface flux" calc_interface_flux!(
-    cache.elements.surface_flux_values, mesh,
-    have_nonconservative_terms(equations), equations,
-    dg.surface_integral, dg, cache)
-
-  # Prolong solution to boundaries
-  @trixi_timeit timer() "prolong2boundaries" prolong2boundaries!(
-    cache, u, mesh, equations, dg.surface_integral, dg)
-
-  # Calculate boundary fluxes
-  @trixi_timeit timer() "boundary flux" calc_boundary_flux!(
-    cache, t, boundary_conditions, mesh, equations, dg.surface_integral, dg)
-
-  # Prolong solution to mortars
-  @trixi_timeit timer() "prolong2mortars" prolong2mortars!(
-    cache, u, mesh, equations, dg.mortar, dg.surface_integral, dg)
-
-  # Calculate mortar fluxes
-  @trixi_timeit timer() "mortar flux" calc_mortar_flux!(
-    cache.elements.surface_flux_values, mesh,
-    have_nonconservative_terms(equations), equations,
-    dg.mortar, dg.surface_integral, dg, cache)
-
-  # Finish to receive MPI data
-  @trixi_timeit timer() "finish MPI receive" finish_mpi_receive!(
-    cache.mpi_cache, mesh, equations, dg, cache)
-
-  # Calculate MPI interface fluxes
-  @trixi_timeit timer() "MPI interface flux" calc_mpi_interface_flux!(
-    cache.elements.surface_flux_values, mesh,
-    have_nonconservative_terms(equations), equations,
-    dg.surface_integral, dg, cache)
-
-  # Calculate MPI mortar fluxes
-  @trixi_timeit timer() "MPI mortar flux" calc_mpi_mortar_flux!(
-    cache.elements.surface_flux_values, mesh,
-    have_nonconservative_terms(equations), equations,
-    dg.mortar, dg.surface_integral, dg, cache)
-
-  # Calculate surface integrals
-  @trixi_timeit timer() "surface integral" calc_surface_integral!(
-    du, u, mesh, equations, dg.surface_integral, dg, cache)
-
-  # Apply Jacobian from mapping to reference element
-  @trixi_timeit timer() "Jacobian" apply_jacobian!(
-    du, mesh, equations, dg, cache)
-
-  # Calculate source terms
-  @trixi_timeit timer() "source terms" calc_sources!(
-    du, u, t, source_terms, equations, dg, cache)
-
-  # Finish to send MPI data
-  @trixi_timeit timer() "finish MPI send" finish_mpi_send!(cache.mpi_cache)
-
-  return nothing
-end
-
-
 function prolong2mpiinterfaces!(cache, u,
                                 mesh::ParallelP4estMesh{2},
                                 equations, surface_integral, dg::DG)
diff --git a/src/solvers/dgsem_tree/dg_2d_parallel.jl b/src/solvers/dgsem_tree/dg_2d_parallel.jl
index d3e1d9406fc..10c198f9be1 100644
--- a/src/solvers/dgsem_tree/dg_2d_parallel.jl
+++ b/src/solvers/dgsem_tree/dg_2d_parallel.jl
@@ -443,7 +443,7 @@
 end
 
 function rhs!(du, u, t,
-              mesh::ParallelTreeMesh{2}, equations,
+              mesh::Union{ParallelTreeMesh{2}, ParallelP4estMesh{2}}, equations,
               initial_condition, boundary_conditions, source_terms,
               dg::DG, cache)
   # Start to receive MPI data
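Note on the pattern: the duplicated `ParallelP4estMesh{2}` driver is deleted and the `ParallelTreeMesh{2}` method is widened to a `Union` signature, so one shared `rhs!` serves both parallel mesh types while mesh-specific steps (e.g. `prolong2mpimortars!`) still resolve via multiple dispatch on the concrete mesh type. A minimal, self-contained sketch of that pattern follows; the names `ToyTreeMesh`, `ToyP4estMesh`, `prolong_step!`, and `toy_rhs!` are hypothetical stand-ins, not Trixi.jl API:

# Hypothetical toy types standing in for the two parallel mesh types.
abstract type AbstractToyMesh end
struct ToyTreeMesh <: AbstractToyMesh end
struct ToyP4estMesh <: AbstractToyMesh end

# Mesh-specific step: one method per concrete mesh type, chosen by dispatch.
prolong_step!(du, mesh::ToyTreeMesh) = (du .+= 1; nothing)
prolong_step!(du, mesh::ToyP4estMesh) = (du .+= 2; nothing)

# Shared driver: a single method covers both mesh types via the Union
# signature, mirroring the rhs! unification in the diff above.
function toy_rhs!(du, mesh::Union{ToyTreeMesh, ToyP4estMesh})
  fill!(du, 0.0)
  prolong_step!(du, mesh)  # dispatches on the concrete mesh type
  return nothing
end

du = zeros(3)
toy_rhs!(du, ToyTreeMesh())   # du == [1.0, 1.0, 1.0]
toy_rhs!(du, ToyP4estMesh())  # du == [2.0, 2.0, 2.0]

The shared driver stays mesh-agnostic; only the steps that genuinely differ between mesh types keep separate methods, which is what makes deleting the duplicated p4est driver safe once mortar support exists for both types.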