Path: src/callbacks_step/save_solution_dg.jl
# By default, Julia/LLVM does not use fused multiply-add operations (FMAs).
# Since these FMAs can increase the performance of many numerical algorithms,
# we need to opt-in explicitly.
# See https://ranocha.de/blog/Optimizing_EC_Trixi for further details.
@muladd begin
#! format: noindent

function save_solution_file(u, time, dt, timestep,
                            mesh::Union{SerialTreeMesh, StructuredMesh,
                                        StructuredMeshView,
                                        UnstructuredMesh2D,
                                        SerialP4estMesh, P4estMeshView,
                                        SerialT8codeMesh},
                            equations, dg::DG, cache,
                            solution_callback,
                            element_variables = Dict{Symbol, Any}(),
                            node_variables = Dict{Symbol, Any}();
                            system = "")
    @unpack output_directory, solution_variables = solution_callback

    # Filename based on current time step
    if isempty(system)
        filename = joinpath(output_directory, @sprintf("solution_%09d.h5", timestep))
    else
        filename = joinpath(output_directory,
                            @sprintf("solution_%s_%09d.h5", system, timestep))
    end

    # Convert to different set of variables if requested
    if solution_variables === cons2cons
        data = u
        n_vars = nvariables(equations)
    else
        # Reinterpret the solution array as an array of conservative variables,
        # compute the solution variables via broadcasting, and reinterpret the
        # result as a plain array of floating point numbers
        data = Array(reinterpret(eltype(u),
                                 solution_variables.(reinterpret(SVector{nvariables(equations),
                                                                         eltype(u)}, u),
                                                     Ref(equations))))

        # Find out variable count by looking at output from `solution_variables` function
        n_vars = size(data, 1)
    end

    # Open file (clobber existing content)
    h5open(filename, "w") do file
        # Add context information as attributes
        attributes(file)["ndims"] = ndims(mesh)
        attributes(file)["equations"] = get_name(equations)
        attributes(file)["polydeg"] = polydeg(dg)
        attributes(file)["n_vars"] = n_vars
        attributes(file)["n_elements"] = nelements(dg, cache)
        attributes(file)["mesh_type"] = get_name(mesh)
        attributes(file)["mesh_file"] = splitdir(mesh.current_filename)[2]
        attributes(file)["time"] = convert(Float64, time) # Ensure that `time` is written as a double precision scalar
        attributes(file)["dt"] = convert(Float64, dt) # Ensure that `dt` is written as a double precision scalar
        attributes(file)["timestep"] = timestep

        # Store each variable of the solution data
        for v in 1:n_vars
            # Convert to 1D array
            file["variables_$v"] = vec(data[v, .., :])

            # Add variable name as attribute
            var = file["variables_$v"]
            attributes(var)["name"] = varnames(solution_variables, equations)[v]
        end

        # Store element variables
        for (v, (key, element_variable)) in enumerate(element_variables)
            # Add to file
            file["element_variables_$v"] = element_variable

            # Add variable name as attribute
            var = file["element_variables_$v"]
            attributes(var)["name"] = string(key)
        end

        # Store node variables
        for (v, (key, node_variable)) in enumerate(node_variables)
            # Add to file
            file["node_variables_$v"] = node_variable

            # Add variable name as attribute
            var = file["node_variables_$v"]
            attributes(var)["name"] = string(key)
        end
    end

    return filename
end
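# Illustrative sketch only: a solution file written by the method above can be read
# back with plain HDF5.jl roughly as follows, assuming `filename` refers to an
# existing "solution_*.h5" file produced here:
#
#     h5open(filename, "r") do file
#         n_vars = read(attributes(file)["n_vars"])
#         # Each variable is stored as a flat vector in a dataset named "variables_$v"
#         u1 = read(file["variables_1"])
#         name1 = read(attributes(file["variables_1"])["name"])
#     end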
function save_solution_file(u, time, dt, timestep,
                            mesh::SerialDGMultiMesh,
                            equations, dg::DG, cache,
                            solution_callback,
                            element_variables = Dict{Symbol, Any}(),
                            node_variables = Dict{Symbol, Any}();
                            system = "")
    @unpack output_directory, solution_variables = solution_callback

    # Filename based on current time step.
    if isempty(system)
        filename = joinpath(output_directory, @sprintf("solution_%09d.h5", timestep))
    else
        filename = joinpath(output_directory,
                            @sprintf("solution_%s_%09d.h5", system, timestep))
    end

    # Convert to different set of variables if requested.
    if solution_variables === cons2cons
        data = u
        n_vars = nvariables(equations)
    else
        data = map(u_node -> solution_variables(u_node, equations), u)
        # Find out variable count by looking at output from `solution_variables` function.
        n_vars = length(data[1])
    end

    # Open file (clobber existing content).
    h5open(filename, "w") do file
        # Add context information as attributes.
        attributes(file)["ndims"] = ndims(mesh)
        attributes(file)["equations"] = get_name(equations)

        if dg.basis.approximation_type isa TensorProductWedge
            attributes(file)["polydeg_tri"] = dg.basis.N[2]
            attributes(file)["polydeg_line"] = dg.basis.N[1]
        else
            attributes(file)["polydeg"] = dg.basis.N
        end

        attributes(file)["element_type"] = dg.basis.element_type |> typeof |> nameof |>
                                           string
        attributes(file)["n_vars"] = n_vars
        attributes(file)["n_elements"] = nelements(dg, cache)
        attributes(file)["dof_per_elem"] = length(dg.basis.r)
        attributes(file)["mesh_type"] = get_name(mesh)
        attributes(file)["mesh_file"] = splitdir(mesh.current_filename)[2]
        attributes(file)["time"] = convert(Float64, time) # Ensure that `time` is written as a double precision scalar.
        attributes(file)["dt"] = convert(Float64, dt) # Ensure that `dt` is written as a double precision scalar.
        attributes(file)["timestep"] = timestep

        # Store each variable of the solution data.
        for v in 1:n_vars
            temp = zeros(size(u.u))
            n_nodes, n_elems = size(u.u)
            for i_elem in 1:n_elems
                for i_node in 1:n_nodes
                    temp[i_node, i_elem] = data[i_node, i_elem][v]
                end
            end

            file["variables_$v"] = temp

            # Add variable name as attribute.
            var = file["variables_$v"]
            attributes(var)["name"] = varnames(solution_variables, equations)[v]
        end

        # Store element variables.
        for (v, (key, element_variable)) in enumerate(element_variables)
            # Add to file.
            file["element_variables_$v"] = element_variable

            # Add variable name as attribute.
            var = file["element_variables_$v"]
            attributes(var)["name"] = string(key)
        end

        # Store node variables.
        for (v, (key, node_variable)) in enumerate(node_variables)
            # Add to file
            file["node_variables_$v"] = node_variable

            # Add variable name as attribute.
            var = file["node_variables_$v"]
            attributes(var)["name"] = string(key)
        end
    end

    return filename
end

function save_solution_file(u, time, dt, timestep,
                            mesh::Union{ParallelTreeMesh, ParallelP4estMesh,
                                        ParallelT8codeMesh}, equations,
                            dg::DG, cache,
                            solution_callback,
                            element_variables = Dict{Symbol, Any}(),
                            node_variables = Dict{Symbol, Any}();
                            system = "")
    @unpack output_directory, solution_variables = solution_callback

    # Filename based on current time step
    if isempty(system)
        filename = joinpath(output_directory, @sprintf("solution_%09d.h5", timestep))
    else
        filename = joinpath(output_directory,
                            @sprintf("solution_%s_%09d.h5", system, timestep))
    end

    # Convert to different set of variables if requested
    if solution_variables === cons2cons
        data = u
        n_vars = nvariables(equations)
    else
        # Reinterpret the solution array as an array of conservative variables,
        # compute the solution variables via broadcasting, and reinterpret the
        # result as a plain array of floating point numbers
        data = Array(reinterpret(eltype(u),
                                 solution_variables.(reinterpret(SVector{nvariables(equations),
                                                                         eltype(u)}, u),
                                                     Ref(equations))))

        # Find out variable count by looking at output from `solution_variables` function
        n_vars = size(data, 1)
    end

    if HDF5.has_parallel()
        save_solution_file_parallel(data, time, dt, timestep, n_vars, mesh, equations,
                                    dg, cache, solution_variables, filename,
                                    element_variables, node_variables)
    else
        save_solution_file_on_root(data, time, dt, timestep, n_vars, mesh, equations,
                                   dg, cache, solution_variables, filename,
                                   element_variables, node_variables)
    end
end
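# Two write strategies are used for parallel meshes below: if HDF5.jl was built with
# MPI-enabled (parallel) HDF5 support, every rank writes its own slice of each dataset
# collectively (`save_solution_file_parallel`); otherwise all data is first gathered
# onto the root rank via `MPI.Gatherv!` and written there (`save_solution_file_on_root`).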
function save_solution_file_parallel(data, time, dt, timestep, n_vars,
                                     mesh::Union{ParallelTreeMesh, ParallelP4estMesh,
                                                 ParallelT8codeMesh},
                                     equations, dg::DG, cache,
                                     solution_variables, filename,
                                     element_variables = Dict{Symbol, Any}(),
                                     node_variables = Dict{Symbol, Any}())

    # Calculate element and node counts by MPI rank
    element_size = nnodes(dg)^ndims(mesh)
    element_counts = cache.mpi_cache.n_elements_by_rank
    node_counts = element_counts * element_size
    # Cumulative sum of elements per rank starting with an additional 0
    cum_element_counts = append!(zeros(eltype(element_counts), 1),
                                 cumsum(element_counts))
    # Cumulative sum of nodes per rank starting with an additional 0
    cum_node_counts = append!(zeros(eltype(node_counts), 1), cumsum(node_counts))

    # Open file using parallel HDF5 (clobber existing content)
    h5open(filename, "w", mpi_comm()) do file
        # Add context information as attributes
        attributes(file)["ndims"] = ndims(mesh)
        attributes(file)["equations"] = get_name(equations)
        attributes(file)["polydeg"] = polydeg(dg)
        attributes(file)["n_vars"] = n_vars
        attributes(file)["n_elements"] = nelementsglobal(mesh, dg, cache)
        attributes(file)["mesh_type"] = get_name(mesh)
        attributes(file)["mesh_file"] = splitdir(mesh.current_filename)[2]
        attributes(file)["time"] = convert(Float64, time) # Ensure that `time` is written as a double precision scalar
        attributes(file)["dt"] = convert(Float64, dt) # Ensure that `dt` is written as a double precision scalar
        attributes(file)["timestep"] = timestep

        # Store each variable of the solution data
        for v in 1:n_vars
            # Need to create dataset explicitly in parallel case
            var = create_dataset(file, "/variables_$v", datatype(eltype(data)),
                                 dataspace((ndofsglobal(mesh, dg, cache),)))
            # Write data of each process in slices (ranks start with 0)
            slice = (cum_node_counts[mpi_rank() + 1] + 1):cum_node_counts[mpi_rank() + 2]
            # Convert to 1D array
            var[slice] = vec(data[v, .., :])
            # Add variable name as attribute
            attributes(var)["name"] = varnames(solution_variables, equations)[v]
        end

        # Store element variables
        for (v, (key, element_variable)) in enumerate(element_variables)
            # Need to create dataset explicitly in parallel case
            var = create_dataset(file, "/element_variables_$v",
                                 datatype(eltype(element_variable)),
                                 dataspace((nelementsglobal(mesh, dg, cache),)))

            # Write data of each process in slices (ranks start with 0)
            slice = (cum_element_counts[mpi_rank() + 1] + 1):cum_element_counts[mpi_rank() + 2]
            # Add to file
            var[slice] = element_variable
            # Add variable name as attribute
            attributes(var)["name"] = string(key)
        end

        # Store node variables
        for (v, (key, node_variable)) in enumerate(node_variables)
            # Need to create dataset explicitly in parallel case
            var = create_dataset(file, "/node_variables_$v",
                                 datatype(eltype(node_variable)),
                                 dataspace((nelementsglobal(mesh, dg, cache) *
                                            element_size,)))

            # Write data of each process in slices (ranks start with 0)
            slice = (cum_node_counts[mpi_rank() + 1] + 1):cum_node_counts[mpi_rank() + 2]
            # Add to file
            var[slice] = node_variable
            # Add variable name as attribute
            attributes(var)["name"] = string(key)
        end
    end

    return filename
end
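# Worked example for the slicing above (illustrative numbers only): with nnodes(dg) = 4
# in 2D, element_size = 16. If three ranks own [2, 3, 1] elements, then
# node_counts = [32, 48, 16] and cum_node_counts = [0, 32, 80, 96], so rank 0 writes
# var[1:32], rank 1 writes var[33:80], and rank 2 writes var[81:96].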
function save_solution_file_on_root(data, time, dt, timestep, n_vars,
                                    mesh::Union{ParallelTreeMesh, ParallelP4estMesh,
                                                ParallelT8codeMesh},
                                    equations, dg::DG, cache,
                                    solution_variables, filename,
                                    element_variables = Dict{Symbol, Any}(),
                                    node_variables = Dict{Symbol, Any}())

    # Calculate element and node counts by MPI rank
    element_size = nnodes(dg)^ndims(mesh)
    element_counts = convert(Vector{Cint}, collect(cache.mpi_cache.n_elements_by_rank))
    node_counts = element_counts * Cint(element_size)

    # non-root ranks only send data
    if !mpi_isroot()
        # Send nodal data to root
        for v in 1:n_vars
            MPI.Gatherv!(vec(data[v, .., :]), nothing, mpi_root(), mpi_comm())
        end

        # Send element data to root
        for (key, element_variable) in element_variables
            MPI.Gatherv!(element_variable, nothing, mpi_root(), mpi_comm())
        end

        # Send additional/extra node variables to root
        for (key, node_variable) in node_variables
            MPI.Gatherv!(node_variable, nothing, mpi_root(), mpi_comm())
        end

        return filename
    end

    # Open file (clobber existing content)
    h5open(filename, "w") do file
        # Add context information as attributes
        attributes(file)["ndims"] = ndims(mesh)
        attributes(file)["equations"] = get_name(equations)
        attributes(file)["polydeg"] = polydeg(dg)
        attributes(file)["n_vars"] = n_vars
        attributes(file)["n_elements"] = nelementsglobal(mesh, dg, cache)
        attributes(file)["mesh_type"] = get_name(mesh)
        attributes(file)["mesh_file"] = splitdir(mesh.current_filename)[2]
        attributes(file)["time"] = convert(Float64, time) # Ensure that `time` is written as a double precision scalar
        attributes(file)["dt"] = convert(Float64, dt) # Ensure that `dt` is written as a double precision scalar
        attributes(file)["timestep"] = timestep

        # Store each variable of the solution data
        for v in 1:n_vars
            # Convert to 1D array
            recv = Vector{eltype(data)}(undef, sum(node_counts))
            MPI.Gatherv!(vec(data[v, .., :]), MPI.VBuffer(recv, node_counts),
                         mpi_root(), mpi_comm())
            file["variables_$v"] = recv

            # Add variable name as attribute
            var = file["variables_$v"]
            attributes(var)["name"] = varnames(solution_variables, equations)[v]
        end

        # Store element variables
        for (v, (key, element_variable)) in enumerate(element_variables)
            # Add to file
            recv = Vector{eltype(data)}(undef, sum(element_counts))
            MPI.Gatherv!(element_variable, MPI.VBuffer(recv, element_counts),
                         mpi_root(), mpi_comm())
            file["element_variables_$v"] = recv

            # Add variable name as attribute
            var = file["element_variables_$v"]
            attributes(var)["name"] = string(key)
        end

        # Store node variables
        for (v, (key, node_variable)) in enumerate(node_variables)
            # Add to file
            recv = Vector{eltype(data)}(undef, sum(node_counts))
            MPI.Gatherv!(node_variable, MPI.VBuffer(recv, node_counts),
                         mpi_root(), mpi_comm())
            file["node_variables_$v"] = recv

            # Add variable name as attribute
            var = file["node_variables_$v"]
            attributes(var)["name"] = string(key)
        end
    end

    return filename
end
end # @muladd