diff --git a/src/meshes/mesh_io.jl b/src/meshes/mesh_io.jl
index 127bc420f65..d1d06193b78 100644
--- a/src/meshes/mesh_io.jl
+++ b/src/meshes/mesh_io.jl
@@ -7,8 +7,8 @@
 # Save current mesh with some context information as an HDF5 file.
 function save_mesh_file(mesh::Union{TreeMesh, P4estMesh, T8codeMesh}, output_directory,
-                        timestep = 0)
-    save_mesh_file(mesh, output_directory, timestep, mpi_parallel(mesh))
+                        timestep = 0; system = "")
+    save_mesh_file(mesh, output_directory, timestep, mpi_parallel(mesh); system = system)
 end
 
 function save_mesh_file(mesh::TreeMesh, output_directory, timestep,
@@ -150,17 +150,27 @@ end
 # Then, within Trixi2Vtk, the P4estMesh and its node coordinates are reconstructured from
 # these attributes for plotting purposes
 function save_mesh_file(mesh::P4estMesh, output_directory, timestep,
-                        mpi_parallel::False)
+                        mpi_parallel::False; system = "")
     # Create output directory (if it does not exist)
    mkpath(output_directory)
 
     # Determine file name based on existence of meaningful time step
     if timestep > 0
-        filename = joinpath(output_directory, @sprintf("mesh_%09d.h5", timestep))
-        p4est_filename = @sprintf("p4est_data_%09d", timestep)
+        if isempty(system)
+            filename = joinpath(output_directory, @sprintf("mesh_%06d.h5", timestep))
+            p4est_filename = @sprintf("p4est_data_%06d", timestep)
+        else
+            filename = joinpath(output_directory, @sprintf("mesh_%06d_%s.h5", timestep, system))
+            p4est_filename = @sprintf("p4est_data_%06d_%s", timestep, system)
+        end
     else
-        filename = joinpath(output_directory, "mesh.h5")
-        p4est_filename = "p4est_data"
+        if isempty(system)
+            filename = joinpath(output_directory, "mesh.h5")
+            p4est_filename = "p4est_data"
+        else
+            filename = joinpath(output_directory, @sprintf("mesh_%s.h5", system))
+            p4est_filename = @sprintf("p4est_data_%s", system)
+        end
     end
 
     p4est_file = joinpath(output_directory, p4est_filename)
@@ -174,6 +184,9 @@ function save_mesh_file(mesh::P4estMesh, output_directory, timestep,
         attributes(file)["mesh_type"] = get_name(mesh)
         attributes(file)["ndims"] = ndims(mesh)
         attributes(file)["p4est_file"] = p4est_filename
+        attributes(file)["coordinates_min"] = mesh.coordinates_min
+        attributes(file)["coordinates_max"] = mesh.coordinates_max
+        attributes(file)["trees_per_dimension"] = mesh.trees_per_dimension
 
         file["tree_node_coordinates"] = mesh.tree_node_coordinates
         file["nodes"] = Vector(mesh.nodes) # the mesh uses `SVector`s for the nodes
@@ -248,9 +261,12 @@ function load_mesh(restart_file::AbstractString; n_cells_max = 0, RealT = Float6
 end
 
 function load_mesh_serial(mesh_file::AbstractString; n_cells_max, RealT)
-    ndims, mesh_type = h5open(mesh_file, "r") do file
+    ndims, mesh_type, coordinates_min, coordinates_max, trees_per_dimension = h5open(mesh_file, "r") do file
         return read(attributes(file)["ndims"]),
-               read(attributes(file)["mesh_type"])
+               read(attributes(file)["mesh_type"]),
+               read(attributes(file)["coordinates_min"]),
+               read(attributes(file)["coordinates_max"]),
+               read(attributes(file)["trees_per_dimension"])
     end
 
     if mesh_type == "TreeMesh"
@@ -321,7 +337,8 @@ function load_mesh_serial(mesh_file::AbstractString; n_cells_max, RealT)
         p4est = load_p4est(p4est_file, Val(ndims))
 
         mesh = P4estMesh{ndims}(p4est, tree_node_coordinates,
-                                nodes, boundary_names, mesh_file, false, true)
+                                nodes, boundary_names, mesh_file, false, true,
+                                coordinates_min, coordinates_max, trees_per_dimension)
    else
        error("Unknown mesh type!")
    end
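For reference, the effect of the new `system` keyword on the file names can be reproduced standalone. A minimal sketch, assuming only the naming logic of the hunk above; `mesh_file_name` is a hypothetical helper used for illustration and is not Trixi.jl API:

```julia
using Printf

# Hypothetical helper (not part of Trixi.jl) mirroring the naming logic above
function mesh_file_name(output_directory, timestep; system = "")
    if timestep > 0
        if isempty(system)
            name = @sprintf("mesh_%06d.h5", timestep)
        else
            name = @sprintf("mesh_%06d_%s.h5", timestep, system)
        end
    else
        name = isempty(system) ? "mesh.h5" : "mesh_$(system).h5"
    end
    return joinpath(output_directory, name)
end

@assert mesh_file_name("out", 0) == joinpath("out", "mesh.h5")
@assert mesh_file_name("out", 42; system = "1") == joinpath("out", "mesh_000042_1.h5")
```

With an empty `system` the file names are unchanged, so existing single-system setups keep writing the same files as before.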
diff --git a/src/meshes/p4est_mesh.jl b/src/meshes/p4est_mesh.jl
index 2046220aeca..1136f16fa05 100644
--- a/src/meshes/p4est_mesh.jl
+++ b/src/meshes/p4est_mesh.jl
@@ -24,10 +24,14 @@ mutable struct P4estMesh{NDIMS, RealT <: Real, IsParallel, P, Ghost, NDIMSP2, NN
     current_filename::String
     unsaved_changes::Bool
     p4est_partition_allow_for_coarsening::Bool
+    coordinates_min::SVector{NDIMS, RealT}
+    coordinates_max::SVector{NDIMS, RealT}
+    trees_per_dimension::SVector{NDIMS, Int}
 
     function P4estMesh{NDIMS}(p4est, tree_node_coordinates, nodes, boundary_names,
                               current_filename, unsaved_changes,
-                              p4est_partition_allow_for_coarsening) where {NDIMS}
+                              p4est_partition_allow_for_coarsening,
+                              coordinates_min, coordinates_max, trees_per_dimension) where {NDIMS}
         if NDIMS == 2
             @assert p4est isa Ptr{p4est_t}
         elseif NDIMS == 3
@@ -57,7 +61,10 @@ mutable struct P4estMesh{NDIMS, RealT <: Real, IsParallel, P, Ghost, NDIMSP2, NN
                                                        boundary_names,
                                                        current_filename,
                                                        unsaved_changes,
-                                                       p4est_partition_allow_for_coarsening)
+                                                       p4est_partition_allow_for_coarsening,
+                                                       coordinates_min,
+                                                       coordinates_max,
+                                                       trees_per_dimension)
 
         # Destroy `p4est` structs when the mesh is garbage collected
         finalizer(destroy_mesh, mesh)
@@ -216,7 +223,8 @@ function P4estMesh(trees_per_dimension; polydeg,
 
     return P4estMesh{NDIMS}(p4est, tree_node_coordinates, nodes,
                             boundary_names, "", unsaved_changes,
-                            p4est_partition_allow_for_coarsening)
+                            p4est_partition_allow_for_coarsening,
+                            coordinates_min, coordinates_max, trees_per_dimension)
 end
 
 # 2D version
diff --git a/src/semidiscretization/semidiscretization_coupled.jl b/src/semidiscretization/semidiscretization_coupled.jl
index 6b009cfad20..8221e86986f 100644
--- a/src/semidiscretization/semidiscretization_coupled.jl
+++ b/src/semidiscretization/semidiscretization_coupled.jl
@@ -175,7 +175,14 @@ function rhs!(du_ode, u_ode, semi::SemidiscretizationCoupled, t)
 
     @trixi_timeit timer() "copy to coupled boundaries" begin
         foreach(semi.semis) do semi_
-            copy_to_coupled_boundary!(semi_.boundary_conditions, u_ode, semi, semi_)
+            boundary_conditions = semi_.boundary_conditions
+            # For p4est meshes we define the boundary conditions as dictionaries.
+            # But for the copy routine we need them as a NamedTuple.
+            # Hence, the conversion here.
+            if typeof(boundary_conditions) <: Trixi.UnstructuredSortedBoundaryTypes
+                boundary_conditions = NamedTuple{Tuple(keys(boundary_conditions.boundary_dictionary))}(values(boundary_conditions.boundary_dictionary))
+            end
+            copy_to_coupled_boundary!(boundary_conditions, u_ode, semi, semi_)
         end
     end
 
@@ -425,9 +432,6 @@ BoundaryConditionCoupled(2, (:begin, :i_backwards), Float64, fun)
 # Using this as y_neg boundary will connect `our_cells[i, 1, j]` to `other_cells[j, end-i, end]`
 BoundaryConditionCoupled(2, (:j, :i_backwards, :end), Float64, fun)
 ```
-
-!!! warning "Experimental code"
-    This is an experimental feature and can change any time.
 """
 mutable struct BoundaryConditionCoupled{NDIMS,
                                         # Store the other semi index as type parameter,
@@ -507,17 +512,54 @@ function (boundary_condition::BoundaryConditionCoupled)(u_inner, orientation, di
     return flux
 end
 
+# Variant of the coupled boundary condition that is called with a `normal_direction`
+# (as on a `P4estMesh`) instead of an `orientation` and a `direction`.
+function (boundary_condition::BoundaryConditionCoupled)(u_inner, normal_direction,
+                                                        x, t, surface_flux_function,
+                                                        equations)
+    # get_node_vars(boundary_condition.u_boundary, equations, solver, surface_node_indices..., cell_indices...),
+    # but we don't have a solver here.
+    # NOTE: for now this always reads the first surface node of the first boundary cell.
+    u_boundary = SVector(ntuple(v -> boundary_condition.u_boundary[v, 1, 1],
+                                Val(nvariables(equations))))
+
+    # Calculate boundary flux
+    flux = surface_flux_function(u_inner, u_boundary, normal_direction, equations)
+
+    return flux
+end
+
 function allocate_coupled_boundary_conditions(semi::AbstractSemidiscretization)
     n_boundaries = 2 * ndims(semi)
     mesh, equations, solver, _ = mesh_equations_solver_cache(semi)
 
-    for direction in 1:n_boundaries
-        boundary_condition = semi.boundary_conditions[direction]
-
-        allocate_coupled_boundary_condition(boundary_condition, direction, mesh,
-                                            equations,
-                                            solver)
-    end
+    if !(typeof(semi.boundary_conditions) <: Trixi.UnstructuredSortedBoundaryTypes)
+        for direction in 1:n_boundaries
+            boundary_condition = semi.boundary_conditions[direction]
+
+            allocate_coupled_boundary_condition(boundary_condition, direction, mesh,
+                                                equations,
+                                                solver)
+        end
+    else
+        # For p4est meshes the boundary conditions are stored in a dictionary;
+        # allocate them in the canonical direction order.
+        for (direction, key) in enumerate((:x_neg, :x_pos, :y_neg, :y_pos))
+            boundary_condition = semi.boundary_conditions.boundary_dictionary[key]
+            allocate_coupled_boundary_condition(boundary_condition, direction, mesh,
+                                                equations, solver)
+        end
+    end
 end
 
 # Don't do anything for other BCs than BoundaryConditionCoupled
@@ -542,6 +584,47 @@ function allocate_coupled_boundary_condition(boundary_condition::BoundaryConditi
                                              cell_size)
 end
 
+# In 2D for a p4est mesh.
+function allocate_coupled_boundary_condition(boundary_condition::BoundaryConditionCoupled{2
+                                                                                           },
+                                             direction, mesh::P4estMesh, equations, dg::DGSEM)
+    # Count the trees that touch the coupled boundary by comparing the first corner
+    # coordinate of every tree with the domain extrema.
+    # Negative x.
+    if direction == 1
+        cell_size = sum(mesh.tree_node_coordinates[1, 1, 1, :] .==
+                        minimum(mesh.tree_node_coordinates[1, 1, 1, :]))
+    # Positive x.
+    elseif direction == 2
+        cell_size = sum(mesh.tree_node_coordinates[1, 1, 1, :] .==
+                        maximum(mesh.tree_node_coordinates[1, 1, 1, :]))
+    # Negative y.
+    elseif direction == 3
+        cell_size = sum(mesh.tree_node_coordinates[2, 1, 1, :] .==
+                        minimum(mesh.tree_node_coordinates[2, 1, 1, :]))
+    # Positive y.
+    else
+        cell_size = sum(mesh.tree_node_coordinates[2, 1, 1, :] .==
+                        maximum(mesh.tree_node_coordinates[2, 1, 1, :]))
+    end
+
+    uEltype = eltype(boundary_condition)
+    boundary_condition.u_boundary = Array{uEltype, 3}(undef, nvariables(equations),
+                                                      nnodes(dg),
+                                                      cell_size)
+end
+
 # Don't do anything for other BCs than BoundaryConditionCoupled
 function copy_to_coupled_boundary!(boundary_condition, u_ode, semi_coupled, semi)
     return nothing
 end
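The tree count per coupled boundary is obtained by comparing the first corner coordinate of every tree with the domain extrema. A standalone sketch with a fabricated `tree_node_coordinates` array for a 3 × 2 tree layout (illustrative values only, not data from this PR):

```julia
# Layout (ndims, nnodes_i, nnodes_j, ntrees); only the corner node [*, 1, 1, tree] matters here
trees_per_dimension = (3, 2)
tree_node_coordinates = zeros(2, 2, 2, prod(trees_per_dimension))
for j in 1:2, i in 1:3
    tree = (j - 1) * 3 + i
    tree_node_coordinates[1, 1, 1, tree] = i - 1.0   # x-coordinate of lower-left corner
    tree_node_coordinates[2, 1, 1, tree] = j - 1.0   # y-coordinate of lower-left corner
end

# Number of trees touching the -x boundary, as in the allocation routine above
x_corners = tree_node_coordinates[1, 1, 1, :]
cell_size_x_neg = sum(x_corners .== minimum(x_corners))
@assert cell_size_x_neg == trees_per_dimension[2]   # 2 trees on the left boundary
```

Note that this relies on exact floating-point equality of the corner coordinates; for curved or perturbed meshes a tolerance-based comparison would be safer.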
@@ -579,12 +662,24 @@ function copy_to_coupled_boundary!(boundary_condition::BoundaryConditionCoupled{
     u_other = wrap_array(u_ode_other, mesh_other, equations_other, solver_other,
                          cache_other)
 
-    linear_indices = LinearIndices(size(mesh_other))
+    if mesh_other isa P4estMesh
+        linear_indices = LinearIndices((mesh_other.trees_per_dimension[1],
+                                        mesh_other.trees_per_dimension[2]))
+    else
+        linear_indices = LinearIndices(size(mesh_other))
+    end
 
-    if other_orientation == 1
-        cells = axes(mesh_other, 2)
-    else # other_orientation == 2
-        cells = axes(mesh_other, 1)
+    if mesh_other isa P4estMesh
+        if other_orientation == 1
+            cells = 1:mesh_other.trees_per_dimension[2]
+        else # other_orientation == 2
+            cells = 1:mesh_other.trees_per_dimension[1]
+        end
+    else
+        if other_orientation == 1
+            cells = axes(mesh_other, 2)
+        else # other_orientation == 2
+            cells = axes(mesh_other, 1)
+        end
     end
 
     # Copy solution data to the coupled boundary using "delayed indexing" with
@@ -593,8 +688,13 @@ function copy_to_coupled_boundary!(boundary_condition::BoundaryConditionCoupled{
     i_node_start, i_node_step = index_to_start_step_2d(indices[1], node_index_range)
     j_node_start, j_node_step = index_to_start_step_2d(indices[2], node_index_range)
 
-    i_cell_start, i_cell_step = index_to_start_step_2d(indices[1], axes(mesh_other, 1))
-    j_cell_start, j_cell_step = index_to_start_step_2d(indices[2], axes(mesh_other, 2))
+    if mesh_other isa P4estMesh
+        i_cell_start, i_cell_step = index_to_start_step_2d(indices[1],
+                                                           1:mesh_other.trees_per_dimension[1])
+        j_cell_start, j_cell_step = index_to_start_step_2d(indices[2],
+                                                           1:mesh_other.trees_per_dimension[2])
+    else
+        i_cell_start, i_cell_step = index_to_start_step_2d(indices[1], axes(mesh_other, 1))
+        j_cell_start, j_cell_step = index_to_start_step_2d(indices[2], axes(mesh_other, 2))
+    end
 
     # We need indices starting at 1 for the handling of `i_cell` etc.
     Base.require_one_based_indexing(cells)
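Here `LinearIndices` over `trees_per_dimension` plays the role that `LinearIndices(size(mesh_other))` plays for a `StructuredMesh`, assuming the same column-major tree ordering. A minimal sketch with illustrative sizes only:

```julia
trees_per_dimension = (4, 3)
linear_indices = LinearIndices(trees_per_dimension)

# Tree in the second column, third row of the tree grid (column-major, as for StructuredMesh)
@assert linear_indices[2, 3] == 2 + (3 - 1) * 4

# Trees along the +x boundary (last column), analogous to the `cells` range above
cells = 1:trees_per_dimension[2]
boundary_trees = [linear_indices[trees_per_dimension[1], cell] for cell in cells]
@assert boundary_trees == [4, 8, 12]
```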
@@ -636,8 +736,8 @@ end
                                               orientation,
                                               boundary_condition::BoundaryConditionCoupled,
                                               mesh::Union{StructuredMesh,
-                                                          StructuredMeshView},
-                                              equations,
+                                                          StructuredMeshView,
+                                                          P4estMesh}, equations,
                                               surface_integral, dg::DG, cache,
                                               direction, node_indices,
                                               surface_node_indices, element)
diff --git a/src/solvers/dgsem_p4est/dg_2d.jl b/src/solvers/dgsem_p4est/dg_2d.jl
index 36624f2ce8a..b1391ec7c25 100644
--- a/src/solvers/dgsem_p4est/dg_2d.jl
+++ b/src/solvers/dgsem_p4est/dg_2d.jl
@@ -333,7 +333,13 @@ end
     x = get_node_coords(node_coordinates, equations, dg, i_index, j_index,
                         element_index)
 
-    flux_ = boundary_condition(u_inner, normal_direction, x, t, surface_flux, equations)
+    if typeof(boundary_condition) <: BoundaryConditionCoupled
+        flux_ = boundary_condition(u_inner, normal_direction, direction_index,
+                                   boundary_index, node_index, surface_flux, equations)
+    else
+        flux_ = boundary_condition(u_inner, normal_direction, x, t, surface_flux,
+                                   equations)
+    end
 
     # Copy flux to element storage in the correct orientation
     for v in eachvariable(equations)
diff --git a/src/solvers/dgsem_unstructured/sort_boundary_conditions.jl b/src/solvers/dgsem_unstructured/sort_boundary_conditions.jl
index 2c2c6876d70..9032bf0e6cd 100644
--- a/src/solvers/dgsem_unstructured/sort_boundary_conditions.jl
+++ b/src/solvers/dgsem_unstructured/sort_boundary_conditions.jl
@@ -47,37 +47,39 @@ function initialize!(boundary_types_container::UnstructuredSortedBoundaryTypes{N
     unique_names = unique(cache.boundaries.name)
 
-    if mpi_isparallel()
-        # Exchange of boundaries names
-        send_buffer = Vector{UInt8}(join(unique_names, "\0"))
-        push!(send_buffer, 0)
-        if mpi_isroot()
-            recv_buffer_length = MPI.Gather(length(send_buffer), mpi_root(), mpi_comm())
-            recv_buffer = Vector{UInt8}(undef, sum(recv_buffer_length))
-            MPI.Gatherv!(send_buffer, MPI.VBuffer(recv_buffer, recv_buffer_length),
-                         mpi_root(), mpi_comm())
-            all_names = unique(Symbol.(split(String(recv_buffer), "\0";
-                                             keepempty = false)))
-            for key in keys(boundary_dictionary)
-                if !(key in all_names)
-                    println(stderr,
-                            "ERROR: Key $(repr(key)) is not a valid boundary name. " *
-                            "Valid names are $all_names.")
-                    MPI.Abort(mpi_comm(), 1)
-                end
-            end
-        else
-            MPI.Gather(length(send_buffer), mpi_root(), mpi_comm())
-            MPI.Gatherv!(send_buffer, nothing, mpi_root(), mpi_comm())
-        end
-    else
-        for key in keys(boundary_dictionary)
-            if !(key in unique_names)
-                error("Key $(repr(key)) is not a valid boundary name. " *
-                      "Valid names are $unique_names.")
-            end
-        end
-    end
 
     # Verify that each boundary has a boundary condition
     for name in unique_names