GitHub Repository: trixi-framework/Trixi.jl
Path: blob/main/src/meshes/parallel_tree_mesh.jl
# By default, Julia/LLVM does not use fused multiply-add operations (FMAs).
# Since these FMAs can increase the performance of many numerical algorithms,
# we need to opt-in explicitly.
# See https://ranocha.de/blog/Optimizing_EC_Trixi for further details.
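# For illustration (example expression assumed, not taken from this file): inside the
# `@muladd begin ... end` block below, MuladdMacro.jl rewrites expressions of the form
# `a * b + c` into `muladd(a, b, c)`, which can be compiled to a single FMA instruction, e.g.
#
#   @muladd f(a, b, c) = a * b + c   # body becomes muladd(a, b, c)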
@muladd begin
#! format: noindent

"""
9
partition!(mesh::ParallelTreeMesh, allow_coarsening=true)
10
11
Partition `mesh` using a static domain decomposition algorithm
12
based on leaf cell count and tree structure.
13
If `allow_coarsening` is `true`, the algorithm will keep leaf cells together
14
on one rank when needed for local coarsening (i.e. when all children of a cell are leaves).
15
"""
function partition!(mesh::ParallelTreeMesh; allow_coarsening = true)
    # Determine number of leaf cells per rank
    leaves = leaf_cells(mesh.tree)
    @assert length(leaves)>mpi_nranks() "Too many ranks to properly partition the mesh!"
    n_leaves_per_rank = OffsetArray(fill(div(length(leaves), mpi_nranks()),
                                         mpi_nranks()),
                                    0:(mpi_nranks() - 1))
    for rank in 0:(rem(length(leaves), mpi_nranks()) - 1)
        n_leaves_per_rank[rank] += 1
    end
    @assert sum(n_leaves_per_rank) == length(leaves)
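
    # Worked example (hypothetical numbers): with 10 leaves and 3 ranks, every rank first
    # receives div(10, 3) = 3 leaves; the remainder rem(10, 3) = 1 extra leaf then goes to
    # rank 0, giving n_leaves_per_rank = [4, 3, 3] for ranks 0, 1, and 2.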

    # Assign MPI ranks to all cells such that all ancestors of each cell - if not yet assigned to a
    # rank - belong to the same rank
    mesh.first_cell_by_rank = similar(n_leaves_per_rank)
    mesh.n_cells_by_rank = similar(n_leaves_per_rank)

    leaf_count = 0
    # Assign first cell to rank 0 (employ depth-first indexing of cells)
    mesh.first_cell_by_rank[0] = 1
    # Iterate over all ranks
    for rank in 0:(mpi_nranks() - 1)
        leaf_count += n_leaves_per_rank[rank]
        last_id = leaves[leaf_count]
        parent_id = mesh.tree.parent_ids[last_id]

        # If coarsening is allowed, we need to make sure that parents of leaves
        # are on the same rank as the leaves when coarsened.
        if allow_coarsening &&
           # Check if all children of the last parent are leaves
           all(id -> is_leaf(mesh.tree, id), @view mesh.tree.child_ids[:, parent_id]) &&
           rank < length(n_leaves_per_rank) - 1 # Make sure there is another rank

            # To keep children of parent together if they are all leaves,
            # all children are added to this rank
            additional_cells = (last_id + 1):mesh.tree.child_ids[end, parent_id]
            if length(additional_cells) > 0
                last_id = additional_cells[end]

                additional_leaves = count(id -> is_leaf(mesh.tree, id),
                                          additional_cells)
                leaf_count += additional_leaves
                # Add leaves to this rank, remove from next rank
                n_leaves_per_rank[rank] += additional_leaves
                n_leaves_per_rank[rank + 1] -= additional_leaves
            end
        end
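
        # Illustration (hypothetical situation): if the last leaf assigned to this rank is,
        # say, the second of four sibling leaves of a 2D tree, the remaining two siblings are
        # pulled onto this rank as well so that the whole family can later be coarsened
        # locally; the leaf budgets of this rank and the next one are adjusted accordingly.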

        @assert all(n -> n > 0, n_leaves_per_rank) "Too many ranks to properly partition the mesh!"

        mesh.n_cells_by_rank[rank] = last_id - mesh.first_cell_by_rank[rank] + 1
        # Use depth-first indexing of cells again to assign non-leaf cells as well
        mesh.tree.mpi_ranks[mesh.first_cell_by_rank[rank]:last_id] .= rank

        # Set first cell of next rank
        if rank < length(n_leaves_per_rank) - 1 # Make sure there is another rank
            mesh.first_cell_by_rank[rank + 1] = mesh.first_cell_by_rank[rank] +
                                                mesh.n_cells_by_rank[rank]
        end
    end

    @assert all(x -> x >= 0, mesh.tree.mpi_ranks[1:length(mesh.tree)])
    @assert sum(mesh.n_cells_by_rank) == length(mesh.tree)

    return nothing
end
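
# Usage sketch (illustration only; the mesh constructor arguments below are assumed, not
# taken from this file). When Trixi.jl is run with MPI, a `TreeMesh` is backed by a parallel
# tree and `partition!` is typically invoked during mesh construction; a direct call looks like
#
#   mesh = TreeMesh((-1.0, -1.0), (1.0, 1.0),
#                   initial_refinement_level = 4,
#                   n_cells_max = 10_000)
#   partition!(mesh)                              # default: allow_coarsening = true
#   # partition!(mesh, allow_coarsening = false)  # strict balancing by leaf count only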

function get_restart_mesh_filename(restart_filename, mpi_parallel::True)
    # Get directory name
    dirname, _ = splitdir(restart_filename)

    if mpi_isroot()
        # Read mesh filename from restart file
        mesh_file = ""
        h5open(restart_filename, "r") do file
            mesh_file = read(attributes(file)["mesh_file"])
        end

        # Broadcast the length first so that non-root ranks can allocate a receive buffer
        buffer = Vector{UInt8}(mesh_file)
        MPI.Bcast!(Ref(length(buffer)), mpi_root(), mpi_comm())
        MPI.Bcast!(buffer, mpi_root(), mpi_comm())
    else # non-root ranks
        count = MPI.Bcast!(Ref(0), mpi_root(), mpi_comm())
        buffer = Vector{UInt8}(undef, count[])
        MPI.Bcast!(buffer, mpi_root(), mpi_comm())
        mesh_file = String(buffer)
    end

    # Construct and return filename
    return joinpath(dirname, mesh_file)
end
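
# The same length-then-payload broadcast pattern in isolation (illustration only, with a
# hypothetical file name and plain MPI.jl calls instead of the `mpi_root()`/`mpi_comm()`
# wrappers used above):
#
#   using MPI
#   MPI.Init()
#   comm = MPI.COMM_WORLD
#   if MPI.Comm_rank(comm) == 0
#       buffer = Vector{UInt8}("mesh.h5")          # hypothetical file name
#       MPI.Bcast!(Ref(length(buffer)), 0, comm)   # 1) broadcast the number of bytes
#       MPI.Bcast!(buffer, 0, comm)                # 2) broadcast the bytes themselves
#   else
#       len = MPI.Bcast!(Ref(0), 0, comm)          # 1) receive the number of bytes
#       buffer = Vector{UInt8}(undef, len[])
#       MPI.Bcast!(buffer, 0, comm)                # 2) receive the bytes
#       mesh_file = String(buffer)
#   end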
end # @muladd