Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

unify parallel 2D tree/p4est mesh rhs! #1111

Merged
merged 1 commit into from
Apr 5, 2022
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
93 changes: 0 additions & 93 deletions src/solvers/dgsem_p4est/dg_2d_parallel.jl
Original file line number Diff line number Diff line change
Expand Up @@ -5,99 +5,6 @@
@muladd begin


# TODO: Unify this function with the `ParallelTreeMesh` `rhs!` once mortar support has been added
# to the `ParallelP4estMesh`
"""
    rhs!(du, u, t, mesh::ParallelP4estMesh{2}, equations,
         initial_condition, boundary_conditions, source_terms, dg::DG, cache)

Evaluate the semidiscrete right-hand side `du = ∂u/∂t` of a DG discretization
on a 2D MPI-parallel `p4est` mesh, writing the result into `du` in place.

MPI communication is interleaved with local work: receives are posted first,
the MPI-coupled surface data is prolonged and sent, and purely rank-local
terms (volume integral, local interface/boundary/mortar fluxes) are computed
while messages are in flight. Only afterwards is the received neighbor data
consumed to compute the MPI interface and mortar fluxes. The relative order
of these steps must therefore be preserved.

Note: `initial_condition` is part of the common `rhs!` signature but is not
used in this method's body.

Each step is wrapped in `@trixi_timeit` so it appears individually in the
performance timer output. Returns `nothing`.
"""
function rhs!(du, u, t,
              mesh::ParallelP4estMesh{2}, equations,
              initial_condition, boundary_conditions, source_terms,
              dg::DG, cache)
  # Post the MPI receives first so communication can overlap with the
  # rank-local computations below
  @trixi_timeit timer() "start MPI receive" start_mpi_receive!(cache.mpi_cache)

  # Prolong solution to MPI interfaces so the data is ready for the send below
  @trixi_timeit timer() "prolong2mpiinterfaces" prolong2mpiinterfaces!(
    cache, u, mesh, equations, dg.surface_integral, dg)

  # Prolong solution to MPI mortars (shared with neighboring ranks) likewise
  @trixi_timeit timer() "prolong2mpimortars" prolong2mpimortars!(
    cache, u, mesh, equations, dg.mortar, dg.surface_integral, dg)

  # Start sending the prolonged MPI surface data; the sends are completed at
  # the very end of this function
  @trixi_timeit timer() "start MPI send" start_mpi_send!(
    cache.mpi_cache, mesh, equations, dg, cache)

  # Reset du before accumulating the RHS contributions
  @trixi_timeit timer() "reset ∂u/∂t" reset_du!(du, dg, cache)

  # Calculate volume integral (element-local work, no communication needed)
  @trixi_timeit timer() "volume integral" calc_volume_integral!(
    du, u, mesh,
    have_nonconservative_terms(equations), equations,
    dg.volume_integral, dg, cache)

  # Prolong solution to rank-local interfaces
  @trixi_timeit timer() "prolong2interfaces" prolong2interfaces!(
    cache, u, mesh, equations, dg.surface_integral, dg)

  # Calculate fluxes at rank-local interfaces
  @trixi_timeit timer() "interface flux" calc_interface_flux!(
    cache.elements.surface_flux_values, mesh,
    have_nonconservative_terms(equations), equations,
    dg.surface_integral, dg, cache)

  # Prolong solution to physical boundaries
  @trixi_timeit timer() "prolong2boundaries" prolong2boundaries!(
    cache, u, mesh, equations, dg.surface_integral, dg)

  # Calculate boundary fluxes from the supplied boundary conditions
  @trixi_timeit timer() "boundary flux" calc_boundary_flux!(
    cache, t, boundary_conditions, mesh, equations, dg.surface_integral, dg)

  # Prolong solution to rank-local mortars
  @trixi_timeit timer() "prolong2mortars" prolong2mortars!(
    cache, u, mesh, equations, dg.mortar, dg.surface_integral, dg)

  # Calculate fluxes at rank-local mortars
  @trixi_timeit timer() "mortar flux" calc_mortar_flux!(
    cache.elements.surface_flux_values, mesh,
    have_nonconservative_terms(equations), equations,
    dg.mortar, dg.surface_integral, dg, cache)

  # Wait for the MPI receives posted above; the received neighbor data is
  # required for the MPI interface/mortar fluxes computed next
  @trixi_timeit timer() "finish MPI receive" finish_mpi_receive!(
    cache.mpi_cache, mesh, equations, dg, cache)

  # Calculate fluxes at MPI interfaces using the received neighbor data
  @trixi_timeit timer() "MPI interface flux" calc_mpi_interface_flux!(
    cache.elements.surface_flux_values, mesh,
    have_nonconservative_terms(equations), equations,
    dg.surface_integral, dg, cache)

  # Calculate fluxes at MPI mortars using the received neighbor data
  @trixi_timeit timer() "MPI mortar flux" calc_mpi_mortar_flux!(
    cache.elements.surface_flux_values, mesh,
    have_nonconservative_terms(equations), equations,
    dg.mortar, dg.surface_integral, dg, cache)

  # Add the surface integral contributions of the flux values computed above
  @trixi_timeit timer() "surface integral" calc_surface_integral!(
    du, u, mesh, equations, dg.surface_integral, dg, cache)

  # Apply Jacobian from mapping to reference element
  @trixi_timeit timer() "Jacobian" apply_jacobian!(
    du, mesh, equations, dg, cache)

  # Add source term contributions (if any)
  @trixi_timeit timer() "source terms" calc_sources!(
    du, u, t, source_terms, equations, dg, cache)

  # Ensure all outgoing MPI messages have completed before returning, so the
  # send buffers may be reused safely in the next RHS evaluation
  @trixi_timeit timer() "finish MPI send" finish_mpi_send!(cache.mpi_cache)

  return nothing
end


function prolong2mpiinterfaces!(cache, u,
mesh::ParallelP4estMesh{2},
equations, surface_integral, dg::DG)
Expand Down
2 changes: 1 addition & 1 deletion src/solvers/dgsem_tree/dg_2d_parallel.jl
Original file line number Diff line number Diff line change
Expand Up @@ -443,7 +443,7 @@ end


function rhs!(du, u, t,
mesh::ParallelTreeMesh{2}, equations,
mesh::Union{ParallelTreeMesh{2}, ParallelP4estMesh{2}}, equations,
initial_condition, boundary_conditions, source_terms,
dg::DG, cache)
# Start to receive MPI data
Expand Down