Merge #1117
1117: extend write_exodus for any topology r=juliasloan25 a=juliasloan25



Co-authored-by: Julia Sloan <jsloan@caltech.edu>
bors[bot] and juliasloan25 committed Feb 2, 2023
2 parents 40ac99d + ebe724b commit e361847
Showing 4 changed files with 123 additions and 14 deletions.
16 changes: 16 additions & 0 deletions .buildkite/pipeline.yml
@@ -20,6 +20,9 @@ steps:
- echo "--- Instantiate test"
- "julia --project=test -e 'using Pkg; Pkg.develop(path=\".\"); Pkg.instantiate(;verbose=true); Pkg.precompile(;strict=true)'"

- echo "--- Instantiate lib/ClimaCoreTempestRemap"
- "julia --project=lib/ClimaCoreTempestRemap -e 'using Pkg; Pkg.develop(path=\".\"); Pkg.instantiate(;verbose=true); Pkg.precompile(;strict=true)'"

- echo "--- Instantiate perf"
- "julia --project=perf -e 'using Pkg; Pkg.instantiate(;verbose=true); Pkg.precompile(;strict=true)'"

@@ -108,6 +111,19 @@ steps:
slurm_nodes: 3
slurm_tasks_per_node: 1

- label: "TempestRemap MPI testing"
key: "write_exodus_mpi"
command:
- "srun julia --color=yes --project=lib/ClimaCoreTempestRemap lib/ClimaCoreTempestRemap/test/mpi_tests/exodus.jl"
timeout_in_minutes: 5
env:
CLIMACORE_DISTRIBUTED: "MPI"
retry:
automatic: true
agents:
slurm_nodes: 3
slurm_tasks_per_node: 1

- group: "Column examples"
steps:

1 change: 1 addition & 0 deletions lib/ClimaCoreTempestRemap/Project.toml
@@ -5,6 +5,7 @@ version = "0.3.7"

[deps]
ClimaComms = "3a4d1b5c-c61d-41fd-a00a-5873ba7a1b0d"
ClimaCommsMPI = "5f86816e-8b66-43b2-912e-75384f99de49"
ClimaCore = "d414da3d-4745-48bb-8d80-42e94e092884"
Dates = "ade2ca70-3891-5945-98fb-dc099432e06a"
LinearAlgebra = "37e2e46d-f89d-539d-b4ee-838fcccc9c8e"
57 changes: 43 additions & 14 deletions lib/ClimaCoreTempestRemap/src/exodus.jl
@@ -9,6 +9,9 @@ for use with TempestRemap.
Note: the generated meshes will use a different ordering of nodes and elements
than those generated by TempestRemap itself.
When using this function with a distributed topology input for MPI, it should
only be called on a single process.
Options:
- `normalize_coordinates`: if true, the coordinates are normalized to be on the
unit sphere (this is required for use with TempestRemap)
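For reference, a minimal usage sketch (not part of this diff), mirroring the MPI test added below; the mesh resolution and output filename are arbitrary:

using ClimaComms
using ClimaCommsMPI
using ClimaCore: Domains, Meshes, Topologies
using ClimaCoreTempestRemap

comms_ctx = ClimaCommsMPI.MPICommsContext()
ClimaComms.init(comms_ctx)

# build a cubed-sphere mesh and a distributed topology on it
domain = Domains.SphereDomain(1.0)
mesh = Meshes.EquiangularCubedSphere(domain, 9)
topology = Topologies.Topology2D(comms_ctx, mesh)

# write the Exodus file on the root process only, per the note above
if ClimaComms.iamroot(comms_ctx)
    write_exodus("mesh.g", topology)
end
ClimaComms.barrier(comms_ctx)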
@@ -21,37 +24,63 @@ function write_exodus(
filename,
topology::T;
normalize_coordinates = true,
) where {T <: Topologies.Topology2D{<:ClimaComms.SingletonCommsContext}}
) where {T <: Topologies.Topology2D}

len_string = 33
len_line = 81
four = 4
time_step = 0

num_elem = Topologies.nlocalelems(topology)
num_nodes = length(Topologies.local_vertices(topology))
elem_order = topology.elemorder
num_elem = length(topology.elemorder)
num_nodes = topology.nglobalvertices
num_dim = Geometry.ncomponents(Meshes.coordinate_type(topology))
num_qa_rec = 1
num_el_blk = 1
num_el_in_blk1 = num_elem
num_nod_per_el1 = 4
num_att_in_blk1 = 1

connect1 = Array{Int32}(undef, (num_nod_per_el1, num_elem)) # array of unique vertex indices for each element
coord = Array{Float64}(undef, (num_nodes, num_dim)) # array of coordinates for each unique vertex
# array of unique vertex indices for each element
connect1 = zeros(Int32, (num_nod_per_el1, num_el_in_blk1))
# array of coordinates for each unique vertex
coord = NTuple{num_dim, Float64}[]

for (uv, vertex) in enumerate(Topologies.local_vertices(topology))
for (e, v) in vertex
connect1[v, e] = uv
end
(e, v) = first(vertex)
c = Float64.(Geometry.components(Meshes.coordinates(topology, e, v)))
if normalize_coordinates
c = c ./ norm(c)
global_vert = 0
for (e, elem) in enumerate(elem_order)
for vert in 1:4
if connect1[vert, e] != 0
continue
else
global_vert += 1
for (selem, svert) in
Meshes.SharedVertices(topology.mesh, elem, vert)
se = topology.orderindex[selem]
connect1[svert, se] = global_vert
end

c =
Float64.(
Tuple(
Geometry.components(
Meshes.coordinates(topology.mesh, elem, vert),
),
)
)
if normalize_coordinates
c = c ./ norm(c)
end
push!(coord, c)
end
end
coord[uv, :] .= c
end

# convert coord to a matrix so it's accepted by NCDatasets
coord = [coord[v][i] for v in 1:num_nodes, i in 1:num_dim]

@assert maximum(connect1) == num_nodes
@assert minimum(connect1) == 1

# init_data
NCDataset(filename, "c") do dts

63 changes: 63 additions & 0 deletions lib/ClimaCoreTempestRemap/test/mpi_tests/exodus.jl
@@ -0,0 +1,63 @@
import ClimaCore
using ClimaComms
using ClimaCommsMPI
using ClimaCore: Geometry, Meshes, Domains, Topologies, Spaces
using NCDatasets
using TempestRemap_jll
using Test
using ClimaCoreTempestRemap

OUTPUT_DIR = mkpath(get(ENV, "CI_OUTPUT_DIR", tempname()))

@testset "write_exodus using distributed topology" begin
comms_ctx = ClimaCommsMPI.MPICommsContext()
pid, nprocs = ClimaComms.init(comms_ctx)

# generate CC mesh
ne = 9
R = 1
domain = Domains.SphereDomain(R)
mesh = Meshes.EquiangularCubedSphere(domain, ne)
topology = Topologies.Topology2D(comms_ctx, mesh)
meshfile_cc = joinpath(OUTPUT_DIR, "test_cc.g")

if ClimaComms.iamroot(comms_ctx)
write_exodus(meshfile_cc, topology)
end
ClimaComms.barrier(comms_ctx)

if ClimaComms.iamroot(comms_ctx)
# generate TR mesh
meshfile_tr = joinpath(OUTPUT_DIR, "test_tr.g")
run(
`$(TempestRemap_jll.GenerateCSMesh_exe()) --res $ne --alt --file $meshfile_tr`,
)
# tempest remap-generated mesh should have the same number of elements
@test Meshes.nelements(mesh) ==
NCDataset(nc -> nc.dim["num_elem"], meshfile_tr)

# compute overlap mesh
meshfile_overlap = joinpath(OUTPUT_DIR, "test_overlap.g")
run(
`$(TempestRemap_jll.GenerateOverlapMesh_exe()) --a $meshfile_cc --b $meshfile_tr --out $meshfile_overlap`,
)
# the overlap mesh should have the same number of elements as the input meshes
@test Meshes.nelements(mesh) ==
NCDataset(nc -> nc.dim["num_elem"], meshfile_overlap)

# compute weight file
weightfile = joinpath(OUTPUT_DIR, "test_weight.nc")
run(
`$(TempestRemap_jll.GenerateOfflineMap_exe()) --in_mesh $meshfile_cc --out_mesh $meshfile_tr --ov_mesh $meshfile_overlap --in_np 1 --out_map $weightfile`,
)
# fractions should be close to 1
frac_a = NCDataset(nc -> Array(nc["frac_a"]), weightfile)
@test all(x -> x ≈ 1, frac_a)

# compute overlap
run(
`$(TempestRemap_jll.GenerateOfflineMap_exe()) --in_mesh $meshfile_cc --out_mesh $meshfile_tr --ov_mesh $meshfile_overlap --in_np 2 --out_map $weightfile`,
)
end
ClimaComms.barrier(comms_ctx)
end
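As a quick sanity check of the written file (an illustrative sketch, assuming the standard Exodus dimension names num_elem and num_nodes used by the writer), the output can be inspected with NCDatasets:

using NCDatasets

NCDataset(joinpath(OUTPUT_DIR, "test_cc.g")) do nc
    # an equiangular cubed sphere with ne = 9 has 6 * ne^2 = 486 elements
    @show nc.dim["num_elem"]
    @show nc.dim["num_nodes"]
end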
