GitHub Repository: trixi-framework/Trixi.jl
Path: blob/main/src/callbacks_step/save_solution_dg.jl

# By default, Julia/LLVM does not use fused multiply-add operations (FMAs).
# Since these FMAs can increase the performance of many numerical algorithms,
# we need to opt-in explicitly.
# See https://ranocha.de/blog/Optimizing_EC_Trixi for further details.
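#
# As a small illustration (this describes `MuladdMacro.jl` in general, not
# logic specific to this file): `@muladd` rewrites multiply-add expressions
# into `muladd` calls, which LLVM can then map to hardware FMA instructions,
# e.g.
#
#     @muladd a * b + c   # is rewritten to muladd(a, b, c)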
@muladd begin
#! format: noindent

function save_solution_file(u, time, dt, timestep,
                            mesh::Union{SerialTreeMesh, StructuredMesh,
                                        StructuredMeshView,
                                        UnstructuredMesh2D,
                                        SerialP4estMesh, P4estMeshView,
                                        SerialT8codeMesh},
                            equations, dg::DG, cache,
                            solution_callback,
                            element_variables = Dict{Symbol, Any}(),
                            node_variables = Dict{Symbol, Any}();
                            system = "")
    @unpack output_directory, solution_variables = solution_callback

    # Filename based on current time step
    if isempty(system)
        filename = joinpath(output_directory, @sprintf("solution_%09d.h5", timestep))
    else
        filename = joinpath(output_directory,
                            @sprintf("solution_%s_%09d.h5", system, timestep))
    end
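    # For example (hypothetical values): with `timestep = 42`, an empty
    # `system` yields "solution_000000042.h5", while `system = "gas"` yields
    # "solution_gas_000000042.h5".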

    # Convert to different set of variables if requested
    if solution_variables === cons2cons
        data = u
        n_vars = nvariables(equations)
    else
        # Reinterpret the solution array as an array of conservative variables,
        # compute the solution variables via broadcasting, and reinterpret the
        # result as a plain array of floating point numbers
        data = Array(reinterpret(eltype(u),
                                 solution_variables.(reinterpret(SVector{nvariables(equations),
                                                                         eltype(u)}, u),
                                                     Ref(equations))))

        # Find out variable count by looking at output from `solution_variables` function
        n_vars = size(data, 1)
    end
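
    # Layout note (illustrative, hypothetical sizes): for a 2D run, `u` has
    # size `(nvariables(equations), n_nodes, n_nodes, n_elements)`. The inner
    # `reinterpret` above views the leading dimension as `SVector`s so that
    # `solution_variables` can be broadcast over all nodal states at once; the
    # outer `reinterpret` plus `Array` restores a plain array whose leading
    # dimension is the number of output variables, i.e.
    #
    #     size(data) == (n_vars, n_nodes, n_nodes, n_elements)
    #
    # `vec(data[v, .., :])` below then flattens all values of variable `v`
    # (the `..` slurps the middle dimensions; it comes from EllipsisNotation.jl).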

    # Open file (clobber existing content)
    h5open(filename, "w") do file
        # Add context information as attributes
        attributes(file)["ndims"] = ndims(mesh)
        attributes(file)["equations"] = get_name(equations)
        attributes(file)["polydeg"] = polydeg(dg)
        attributes(file)["n_vars"] = n_vars
        attributes(file)["n_elements"] = nelements(dg, cache)
        attributes(file)["mesh_type"] = get_name(mesh)
        attributes(file)["mesh_file"] = splitdir(mesh.current_filename)[2]
        attributes(file)["time"] = convert(Float64, time) # Ensure that `time` is written as a double precision scalar
        attributes(file)["dt"] = convert(Float64, dt) # Ensure that `dt` is written as a double precision scalar
        attributes(file)["timestep"] = timestep

        # Store each variable of the solution data
        for v in 1:n_vars
            # Convert to 1D array
            file["variables_$v"] = vec(data[v, .., :])

            # Add variable name as attribute
            var = file["variables_$v"]
            attributes(var)["name"] = varnames(solution_variables, equations)[v]
        end

        # Store element variables
        for (v, (key, element_variable)) in enumerate(element_variables)
            # Add to file
            file["element_variables_$v"] = element_variable

            # Add variable name as attribute
            var = file["element_variables_$v"]
            attributes(var)["name"] = string(key)
        end

        # Store node variables
        for (v, (key, node_variable)) in enumerate(node_variables)
            # Add to file
            file["node_variables_$v"] = node_variable

            # Add variable name as attribute
            var = file["node_variables_$v"]
            attributes(var)["name"] = string(key)
        end
    end

    return filename
end

function save_solution_file(u, time, dt, timestep,
                            mesh::SerialDGMultiMesh,
                            equations, dg::DG, cache,
                            solution_callback,
                            element_variables = Dict{Symbol, Any}(),
                            node_variables = Dict{Symbol, Any}();
                            system = "")
    @unpack output_directory, solution_variables = solution_callback

    # Filename based on current time step.
    if isempty(system)
        filename = joinpath(output_directory, @sprintf("solution_%09d.h5", timestep))
    else
        filename = joinpath(output_directory,
                            @sprintf("solution_%s_%09d.h5", system, timestep))
    end

    # Convert to different set of variables if requested.
    if solution_variables === cons2cons
        data = u
        n_vars = nvariables(equations)
    else
        data = map(u_node -> solution_variables(u_node, equations), u)
        # Find out variable count by looking at output from `solution_variables` function.
        n_vars = length(data[1])
    end
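
    # Note (illustrative): for `DGMulti` solvers, `u` is indexed as
    # `u[i_node, i_elem]` with each entry an `SVector` of conservative
    # variables, so `map` above returns an array of the same shape whose
    # entries are the converted solution states.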

    # Open file (clobber existing content).
    h5open(filename, "w") do file
        # Add context information as attributes.
        attributes(file)["ndims"] = ndims(mesh)
        attributes(file)["equations"] = get_name(equations)

        if dg.basis.approximation_type isa TensorProductWedge
            attributes(file)["polydeg_tri"] = dg.basis.N[2]
            attributes(file)["polydeg_line"] = dg.basis.N[1]
        else
            attributes(file)["polydeg"] = dg.basis.N
        end
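
        # Note (illustrative): a `TensorProductWedge` approximation combines a
        # triangular basis with a line basis, so `dg.basis.N` holds one
        # polynomial degree per component, which is why both are stored above
        # as separate attributes.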

        attributes(file)["element_type"] = dg.basis.element_type |> typeof |> nameof |>
                                           string
        attributes(file)["n_vars"] = n_vars
        attributes(file)["n_elements"] = nelements(dg, cache)
        attributes(file)["dof_per_elem"] = length(dg.basis.r)
        attributes(file)["mesh_type"] = get_name(mesh)
        attributes(file)["mesh_file"] = splitdir(mesh.current_filename)[2]
        attributes(file)["time"] = convert(Float64, time) # Ensure that `time` is written as a double precision scalar.
        attributes(file)["dt"] = convert(Float64, dt) # Ensure that `dt` is written as a double precision scalar.
        attributes(file)["timestep"] = timestep

        # Store each variable of the solution data.
        for v in 1:n_vars
            temp = zeros(size(u.u))
            n_nodes, n_elems = size(u.u)
            for i_elem in 1:n_elems
                for i_node in 1:n_nodes
                    temp[i_node, i_elem] = data[i_node, i_elem][v]
                end
            end

            file["variables_$v"] = temp

            # Add variable name as attribute.
            var = file["variables_$v"]
            attributes(var)["name"] = varnames(solution_variables, equations)[v]
        end

        # Store element variables.
        for (v, (key, element_variable)) in enumerate(element_variables)
            # Add to file.
            file["element_variables_$v"] = element_variable

            # Add variable name as attribute.
            var = file["element_variables_$v"]
            attributes(var)["name"] = string(key)
        end

        # Store node variables.
        for (v, (key, node_variable)) in enumerate(node_variables)
            # Add to file.
            file["node_variables_$v"] = node_variable

            # Add variable name as attribute.
            var = file["node_variables_$v"]
            attributes(var)["name"] = string(key)
        end
    end

    return filename
end

function save_solution_file(u, time, dt, timestep,
                            mesh::Union{ParallelTreeMesh, ParallelP4estMesh,
                                        ParallelT8codeMesh}, equations,
                            dg::DG, cache,
                            solution_callback,
                            element_variables = Dict{Symbol, Any}(),
                            node_variables = Dict{Symbol, Any}();
                            system = "")
    @unpack output_directory, solution_variables = solution_callback

    # Filename based on current time step
    if isempty(system)
        filename = joinpath(output_directory, @sprintf("solution_%09d.h5", timestep))
    else
        filename = joinpath(output_directory,
                            @sprintf("solution_%s_%09d.h5", system, timestep))
    end

    # Convert to different set of variables if requested
    if solution_variables === cons2cons
        data = u
        n_vars = nvariables(equations)
    else
        # Reinterpret the solution array as an array of conservative variables,
        # compute the solution variables via broadcasting, and reinterpret the
        # result as a plain array of floating point numbers
        data = Array(reinterpret(eltype(u),
                                 solution_variables.(reinterpret(SVector{nvariables(equations),
                                                                         eltype(u)}, u),
                                                     Ref(equations))))

        # Find out variable count by looking at output from `solution_variables` function
        n_vars = size(data, 1)
    end

    if HDF5.has_parallel()
        save_solution_file_parallel(data, time, dt, timestep, n_vars, mesh, equations,
                                    dg, cache, solution_variables, filename,
                                    element_variables, node_variables)
    else
        save_solution_file_on_root(data, time, dt, timestep, n_vars, mesh, equations,
                                   dg, cache, solution_variables, filename,
                                   element_variables, node_variables)
    end
end
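
# Note (general HDF5.jl behavior, not specific to this file): `HDF5.has_parallel()`
# returns `true` only if the underlying HDF5 library was built with MPI support.
# Otherwise, the fallback `save_solution_file_on_root` defined below gathers all
# data onto the root rank and writes the file serially.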

function save_solution_file_parallel(data, time, dt, timestep, n_vars,
                                     mesh::Union{ParallelTreeMesh, ParallelP4estMesh,
                                                 ParallelT8codeMesh},
                                     equations, dg::DG, cache,
                                     solution_variables, filename,
                                     element_variables = Dict{Symbol, Any}(),
                                     node_variables = Dict{Symbol, Any}())

    # Calculate element and node counts by MPI rank
    element_size = nnodes(dg)^ndims(mesh)
    element_counts = cache.mpi_cache.n_elements_by_rank
    node_counts = element_counts * element_size
    # Cumulative sum of elements per rank starting with an additional 0
    cum_element_counts = append!(zeros(eltype(element_counts), 1),
                                 cumsum(element_counts))
    # Cumulative sum of nodes per rank starting with an additional 0
    cum_node_counts = append!(zeros(eltype(node_counts), 1), cumsum(node_counts))
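
    # Illustration (hypothetical counts): with two ranks, `element_counts == [2, 3]`,
    # and `element_size == 4`, we get `node_counts == [8, 12]` and
    #
    #     cum_element_counts == [0, 2, 5]
    #     cum_node_counts    == [0, 8, 20]
    #
    # so rank 0 writes node slice 1:8 and rank 1 writes 9:20 of each dataset.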

    # Open file using parallel HDF5 (clobber existing content)
    h5open(filename, "w", mpi_comm()) do file
        # Add context information as attributes
        attributes(file)["ndims"] = ndims(mesh)
        attributes(file)["equations"] = get_name(equations)
        attributes(file)["polydeg"] = polydeg(dg)
        attributes(file)["n_vars"] = n_vars
        attributes(file)["n_elements"] = nelementsglobal(mesh, dg, cache)
        attributes(file)["mesh_type"] = get_name(mesh)
        attributes(file)["mesh_file"] = splitdir(mesh.current_filename)[2]
        attributes(file)["time"] = convert(Float64, time) # Ensure that `time` is written as a double precision scalar
        attributes(file)["dt"] = convert(Float64, dt) # Ensure that `dt` is written as a double precision scalar
        attributes(file)["timestep"] = timestep

        # Store each variable of the solution data
        for v in 1:n_vars
            # Need to create dataset explicitly in parallel case
            var = create_dataset(file, "/variables_$v", datatype(eltype(data)),
                                 dataspace((ndofsglobal(mesh, dg, cache),)))
            # Write data of each process in slices (ranks start with 0)
            slice = (cum_node_counts[mpi_rank() + 1] + 1):cum_node_counts[mpi_rank() + 2]
            # Convert to 1D array
            var[slice] = vec(data[v, .., :])
            # Add variable name as attribute
            attributes(var)["name"] = varnames(solution_variables, equations)[v]
        end

        # Store element variables
        for (v, (key, element_variable)) in enumerate(element_variables)
            # Need to create dataset explicitly in parallel case
            var = create_dataset(file, "/element_variables_$v",
                                 datatype(eltype(element_variable)),
                                 dataspace((nelementsglobal(mesh, dg, cache),)))

            # Write data of each process in slices (ranks start with 0)
            slice = (cum_element_counts[mpi_rank() + 1] + 1):cum_element_counts[mpi_rank() + 2]
            # Add to file
            var[slice] = element_variable
            # Add variable name as attribute
            attributes(var)["name"] = string(key)
        end

        # Store node variables
        for (v, (key, node_variable)) in enumerate(node_variables)
            # Need to create dataset explicitly in parallel case
            var = create_dataset(file, "/node_variables_$v",
                                 datatype(eltype(node_variable)),
                                 dataspace((nelementsglobal(mesh, dg, cache) *
                                            element_size,)))

            # Write data of each process in slices (ranks start with 0)
            slice = (cum_node_counts[mpi_rank() + 1] + 1):cum_node_counts[mpi_rank() + 2]
            # Add to file
            var[slice] = node_variable
            # Add variable name as attribute
            attributes(var)["name"] = string(key)
        end
    end

    return filename
end

function save_solution_file_on_root(data, time, dt, timestep, n_vars,
                                    mesh::Union{ParallelTreeMesh, ParallelP4estMesh,
                                                ParallelT8codeMesh},
                                    equations, dg::DG, cache,
                                    solution_variables, filename,
                                    element_variables = Dict{Symbol, Any}(),
                                    node_variables = Dict{Symbol, Any}())

    # Calculate element and node counts by MPI rank
    element_size = nnodes(dg)^ndims(mesh)
    element_counts = convert(Vector{Cint}, collect(cache.mpi_cache.n_elements_by_rank))
    node_counts = element_counts * Cint(element_size)

    # Non-root ranks only send data
    if !mpi_isroot()
        # Send nodal data to root
        for v in 1:n_vars
            MPI.Gatherv!(vec(data[v, .., :]), nothing, mpi_root(), mpi_comm())
        end

        # Send element data to root
        for (key, element_variable) in element_variables
            MPI.Gatherv!(element_variable, nothing, mpi_root(), mpi_comm())
        end

        # Send additional node variables to root
        for (key, node_variable) in node_variables
            MPI.Gatherv!(node_variable, nothing, mpi_root(), mpi_comm())
        end

        return filename
    end
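
    # Note on the `MPI.Gatherv!` pattern (general MPI.jl usage, not specific
    # to this file): a variable-length gather needs per-rank receive counts
    # only on the root, so the non-root ranks above pass `nothing` as the
    # receive buffer, while the root below wraps its receive buffer in an
    # `MPI.VBuffer` together with `node_counts`/`element_counts`.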

    # Open file (clobber existing content)
    h5open(filename, "w") do file
        # Add context information as attributes
        attributes(file)["ndims"] = ndims(mesh)
        attributes(file)["equations"] = get_name(equations)
        attributes(file)["polydeg"] = polydeg(dg)
        attributes(file)["n_vars"] = n_vars
        attributes(file)["n_elements"] = nelementsglobal(mesh, dg, cache)
        attributes(file)["mesh_type"] = get_name(mesh)
        attributes(file)["mesh_file"] = splitdir(mesh.current_filename)[2]
        attributes(file)["time"] = convert(Float64, time) # Ensure that `time` is written as a double precision scalar
        attributes(file)["dt"] = convert(Float64, dt) # Ensure that `dt` is written as a double precision scalar
        attributes(file)["timestep"] = timestep

        # Store each variable of the solution data
        for v in 1:n_vars
            # Convert to 1D array
            recv = Vector{eltype(data)}(undef, sum(node_counts))
            MPI.Gatherv!(vec(data[v, .., :]), MPI.VBuffer(recv, node_counts),
                         mpi_root(), mpi_comm())
            file["variables_$v"] = recv

            # Add variable name as attribute
            var = file["variables_$v"]
            attributes(var)["name"] = varnames(solution_variables, equations)[v]
        end

        # Store element variables
        for (v, (key, element_variable)) in enumerate(element_variables)
            # Add to file
            recv = Vector{eltype(data)}(undef, sum(element_counts))
            MPI.Gatherv!(element_variable, MPI.VBuffer(recv, element_counts),
                         mpi_root(), mpi_comm())
            file["element_variables_$v"] = recv

            # Add variable name as attribute
            var = file["element_variables_$v"]
            attributes(var)["name"] = string(key)
        end

        # Store node variables
        for (v, (key, node_variable)) in enumerate(node_variables)
            # Add to file
            recv = Vector{eltype(data)}(undef, sum(node_counts))
            MPI.Gatherv!(node_variable, MPI.VBuffer(recv, node_counts),
                         mpi_root(), mpi_comm())
            file["node_variables_$v"] = recv

            # Add variable name as attribute
            var = file["node_variables_$v"]
            attributes(var)["name"] = string(key)
        end
    end

    return filename
end
end # @muladd