diff --git a/docs/src/model_setup/boundary_conditions.md b/docs/src/model_setup/boundary_conditions.md
index 805351e917..0a2d653467 100644
--- a/docs/src/model_setup/boundary_conditions.md
+++ b/docs/src/model_setup/boundary_conditions.md
@@ -283,13 +283,13 @@ IncompressibleModel{CPU, Float64}(time = 0 seconds, iteration = 0)
 julia> model.velocities.u
 Field located at (Face, Center, Center)
-├── data: OffsetArrays.OffsetArray{Float64,3,Array{Float64,3}}, size: (18, 18, 18)
+├── data: OffsetArrays.OffsetArray{Float64,3,Array{Float64,3}}, size: (16, 16, 16)
 ├── grid: RegularRectilinearGrid{Float64, Periodic, Periodic, Bounded}(Nx=16, Ny=16, Nz=16)
 └── boundary conditions: x=(west=Periodic, east=Periodic), y=(south=Periodic, north=Periodic), z=(bottom=Value, top=Value)
 
 julia> model.tracers.T
 Field located at (Center, Center, Center)
-├── data: OffsetArrays.OffsetArray{Float64,3,Array{Float64,3}}, size: (18, 18, 18)
+├── data: OffsetArrays.OffsetArray{Float64,3,Array{Float64,3}}, size: (16, 16, 16)
 ├── grid: RegularRectilinearGrid{Float64, Periodic, Periodic, Bounded}(Nx=16, Ny=16, Nz=16)
 └── boundary conditions: x=(west=Periodic, east=Periodic), y=(south=Periodic, north=Periodic), z=(bottom=Gradient, top=Value)
 ```
diff --git a/docs/src/model_setup/tracers.md b/docs/src/model_setup/tracers.md
index 763d705af6..d3141a2339 100644
--- a/docs/src/model_setup/tracers.md
+++ b/docs/src/model_setup/tracers.md
@@ -26,13 +26,13 @@ whose fields can be accessed via `model.tracers.T` and `model.tracers.S`.
 ```jldoctest tracers
 julia> model.tracers.T
 Field located at (Center, Center, Center)
-├── data: OffsetArrays.OffsetArray{Float64,3,Array{Float64,3}}, size: (66, 66, 66)
+├── data: OffsetArrays.OffsetArray{Float64,3,Array{Float64,3}}, size: (64, 64, 64)
 ├── grid: RegularRectilinearGrid{Float64, Periodic, Periodic, Bounded}(Nx=64, Ny=64, Nz=64)
 └── boundary conditions: x=(west=Periodic, east=Periodic), y=(south=Periodic, north=Periodic), z=(bottom=ZeroFlux, top=ZeroFlux)
 
 julia> model.tracers.S
 Field located at (Center, Center, Center)
-├── data: OffsetArrays.OffsetArray{Float64,3,Array{Float64,3}}, size: (66, 66, 66)
+├── data: OffsetArrays.OffsetArray{Float64,3,Array{Float64,3}}, size: (64, 64, 64)
 ├── grid: RegularRectilinearGrid{Float64, Periodic, Periodic, Bounded}(Nx=64, Ny=64, Nz=64)
 └── boundary conditions: x=(west=Periodic, east=Periodic), y=(south=Periodic, north=Periodic), z=(bottom=ZeroFlux, top=ZeroFlux)
 
diff --git a/src/AbstractOperations/AbstractOperations.jl b/src/AbstractOperations/AbstractOperations.jl
index bb764784c1..33e9d4fa4d 100644
--- a/src/AbstractOperations/AbstractOperations.jl
+++ b/src/AbstractOperations/AbstractOperations.jl
@@ -26,7 +26,7 @@ import Oceananigans.Fields: data, compute_at!
 ##### Basic functionality
 #####
 
-abstract type AbstractOperation{X, Y, Z, A, G, T} <: AbstractField{X, Y, Z, A, G, T} end
+abstract type AbstractOperation{X, Y, Z, A, G, T} <: AbstractField{X, Y, Z, A, G, T, 3} end
 
 const AF = AbstractField
 
diff --git a/src/BuoyancyModels/buoyancy_field.jl b/src/BuoyancyModels/buoyancy_field.jl
index e291097942..a8c3468407 100644
--- a/src/BuoyancyModels/buoyancy_field.jl
+++ b/src/BuoyancyModels/buoyancy_field.jl
@@ -11,14 +11,14 @@ import Oceananigans.Fields: compute!, compute_at!
 import Oceananigans: short_show
 
-struct BuoyancyField{B, S, A, D, G, T, C} <: AbstractDataField{Center, Center, Center, A, G, T}
+struct BuoyancyField{B, S, A, D, G, T, C} <: AbstractDataField{Center, Center, Center, A, G, T, 3}
             data :: D
     architecture :: A
             grid :: G
         buoyancy :: B
         tracers :: C
          status :: S
-    
+
     """
         BuoyancyField(data, grid, buoyancy, tracers)
 
diff --git a/src/Fields/Fields.jl b/src/Fields/Fields.jl
index f0616b46cd..3b11f5abf3 100644
--- a/src/Fields/Fields.jl
+++ b/src/Fields/Fields.jl
@@ -1,15 +1,14 @@
 module Fields
 
-export
-    Face, Center,
-    AbstractField, Field,
-    CenterField, XFaceField, YFaceField, ZFaceField,
-    ReducedField, AveragedField, ComputedField, KernelComputedField, BackgroundField,
-    interior, data,
-    xnode, ynode, znode, location,
-    set!, compute!, @compute,
-    VelocityFields, TracerFields, tracernames, PressureFields, TendencyFields,
-    interpolate, FieldSlicer
+export Face, Center
+export AbstractField, AbstractDataField, Field
+export CenterField, XFaceField, YFaceField, ZFaceField
+export ReducedField, AveragedField, ComputedField, KernelComputedField, BackgroundField
+export interior, data
+export xnode, ynode, znode, location
+export set!, compute!, @compute
+export VelocityFields, TracerFields, tracernames, PressureFields, TendencyFields
+export interpolate, FieldSlicer
 
 using Oceananigans.Architectures
 using Oceananigans.Grids
@@ -36,4 +35,8 @@ include("show_fields.jl")
 include("broadcasting_abstract_fields.jl")
 include("mapreduce_abstract_fields.jl")
 
+# Fallback: cannot infer boundary conditions.
+boundary_conditions(field) = nothing
+boundary_conditions(f::Union{Field, ReducedField, ComputedField, KernelComputedField}) = f.boundary_conditions
+
 end
diff --git a/src/Fields/abstract_field.jl b/src/Fields/abstract_field.jl
index 4e57b6e9eb..c83ef9a635 100644
--- a/src/Fields/abstract_field.jl
+++ b/src/Fields/abstract_field.jl
@@ -21,20 +21,21 @@ const ArchOrNothing = Union{AbstractArchitecture, Nothing}
 const GridOrNothing = Union{AbstractGrid, Nothing}
 
 """
-    AbstractField{X, Y, Z, A, G, T}
+    AbstractField{X, Y, Z, A, G, T, N}
 
 Abstract supertype for fields located at `(X, Y, Z)` on architecture `A`
-and defined on a grid `G` with eltype `T`.
+and defined on a grid `G` with eltype `T` and `N` dimensions.
 """
-abstract type AbstractField{X, Y, Z, A <: ArchOrNothing, G <: GridOrNothing, T} <: AbstractArray{T, 3} end
+abstract type AbstractField{X, Y, Z, A <: ArchOrNothing, G <: GridOrNothing, T, N} <: AbstractArray{T, N} end
 
 """
-    AbstractDataField{X, Y, Z, A, G}
+    AbstractDataField{X, Y, Z, A, G, T, N}
 
 Abstract supertype for fields with concrete data in settable underlying arrays,
-located at `(X, Y, Z)` on architecture `A` and defined on a grid `G` with eltype `T`.
+located at `(X, Y, Z)` on architecture `A` and defined on a grid `G` with eltype `T`
+and `N` dimensions.
 """
-abstract type AbstractDataField{X, Y, Z, A, G, T} <: AbstractField{X, Y, Z, A, G, T} end
+abstract type AbstractDataField{X, Y, Z, A, G, T, N} <: AbstractField{X, Y, Z, A, G, T, N} end
 
 Base.IndexStyle(::AbstractField) = IndexCartesian()
 
@@ -186,7 +187,7 @@ Base.fill!(f::AbstractDataField, val) = fill!(parent(f), val)
 # Don't use axes(f) to checkbounds; use axes(f.data)
 Base.checkbounds(f::AbstractField, I...) = Base.checkbounds(f.data, I...)
 
-@propagate_inbounds Base.getindex(f::AbstractDataField, i, j, k) = f.data[i, j, k]
+@propagate_inbounds Base.getindex(f::AbstractDataField, inds...) = getindex(f.data, inds...)
 # Linear indexing
 @propagate_inbounds Base.getindex(f::AbstractDataField, i::Int) = parent(f)[i]
 
@@ -223,8 +224,6 @@ znodes(ψ::AbstractField) = znodes(location(ψ, 3), ψ.grid)
 
 nodes(ψ::AbstractField; kwargs...) = nodes(location(ψ), ψ.grid; kwargs...)
 
-Base.iterate(f::AbstractDataField, state=1) = iterate(f.data, state)
-
 #####
 ##### fill_halo_regions!
 #####
@@ -235,53 +234,49 @@ fill_halo_regions!(field::AbstractField, arch, args...) = fill_halo_regions!(fie
 ##### Field reductions
 #####
 
+const AbstractGPUDataField = AbstractDataField{X, Y, Z, GPU} where {X, Y, Z}
+
 """
     minimum(field::AbstractDataField; dims=:)
-
 Compute the minimum value of an Oceananigans `field` over the given dimensions (not including halo
 points). By default all dimensions are included.
 """
-minimum(field::AbstractDataField; dims=:) = minimum(interior_copy(field); dims=dims)
+minimum(field::AbstractGPUDataField; dims=:) = minimum(interior_copy(field); dims=dims)
 
 """
    minimum(f, field::AbstractDataField; dims=:)
-
 Returns the smallest result of calling the function `f` on each element of an Oceananigans `field`
 (not including halo points) over the given dimensions. By default all dimensions are included.
 """
-minimum(f, field::AbstractDataField; dims=:) = minimum(f, interior_copy(field); dims=dims)
+minimum(f, field::AbstractGPUDataField; dims=:) = minimum(f, interior_copy(field); dims=dims)
 
 """
    maximum(field::AbstractDataField; dims=:)
-
 Compute the maximum value of an Oceananigans `field` over the given dimensions (not including halo
 points). By default all dimensions are included.
 """
-maximum(field::AbstractDataField; dims=:) = maximum(interior_copy(field); dims=dims)
+maximum(field::AbstractGPUDataField; dims=:) = maximum(interior_copy(field); dims=dims)
 
 """
    maximum(f, field::AbstractDataField; dims=:)
-
 Returns the largest result of calling the function `f` on each element of an Oceananigans `field`
 (not including halo points) over the given dimensions. By default all dimensions are included.
 """
-maximum(f, field::AbstractDataField; dims=:) = maximum(f, interior_copy(field); dims=dims)
+maximum(f, field::AbstractGPUDataField; dims=:) = maximum(f, interior_copy(field); dims=dims)
 
 """
    mean(field::AbstractDataField; dims=:)
-
 Compute the mean of an Oceananigans `field` over the given dimensions (not including halo points).
 By default all dimensions are included.
 """
-mean(field::AbstractDataField; dims=:) = mean(interior_copy(field); dims=dims)
+mean(field::AbstractGPUDataField; dims=:) = mean(interior_copy(field); dims=dims)
 
 """
    mean(f::Function, field::AbstractDataField; dims=:)
-
 Apply the function `f` to each element of an Oceananigans `field` and take the mean over
 dimensions `dims` (not including halo points). By default all dimensions are included.
 """
-mean(f::Function, field::AbstractDataField; dims=:) = mean(f, interior_copy(field); dims=dims)
+mean(f::Function, field::AbstractGPUDataField; dims=:) = mean(f, interior_copy(field); dims=dims)
 
 # Risky to use these without tests. Docs would also be nice.
 Statistics.norm(a::AbstractField) = sqrt(mapreduce(x -> x * x, +, interior(a)))
diff --git a/src/Fields/computed_field.jl b/src/Fields/computed_field.jl
index 8a1bd9887c..aeb1fefdf8 100644
--- a/src/Fields/computed_field.jl
+++ b/src/Fields/computed_field.jl
@@ -4,7 +4,7 @@ using KernelAbstractions: @kernel, @index, Event
 using Oceananigans.Grids
 using Oceananigans.BoundaryConditions: fill_halo_regions!
 
-struct ComputedField{X, Y, Z, S, O, A, D, G, T, C} <: AbstractDataField{X, Y, Z, A, G, T}
+struct ComputedField{X, Y, Z, S, O, A, D, G, T, C} <: AbstractDataField{X, Y, Z, A, G, T, 3}
             data :: D
     architecture :: A
             grid :: G
@@ -76,7 +76,7 @@ function ComputedField(LX, LY, LZ, operand, arch, grid;
                        data = nothing,
                        recompute_safely = true,
                        boundary_conditions = ComputedFieldBoundaryConditions(grid, (LX, LY, LZ)))
-    
+
     # Architecturanigans
     operand_arch = architecture(operand)
     arch = isnothing(operand_arch) ? arch : operand_arch
@@ -105,7 +105,7 @@ function compute!(comp::ComputedField{LX, LY, LZ}, time=nothing) where {LX, LY,
                                                 include_right_boundaries=true,
                                                 location=(LX, LY, LZ))
 
-    compute_kernel! = _compute!(device(arch), workgroup, worksize) 
+    compute_kernel! = _compute!(device(arch), workgroup, worksize)
 
     event = compute_kernel!(comp.data, comp.operand; dependencies=Event(device(arch)))
 
diff --git a/src/Fields/field.jl b/src/Fields/field.jl
index 41c08f0df7..deaa12bb16 100644
--- a/src/Fields/field.jl
+++ b/src/Fields/field.jl
@@ -1,6 +1,6 @@
 using Adapt
 
-struct Field{X, Y, Z, A, D, G, T, B} <: AbstractDataField{X, Y, Z, A, G, T}
+struct Field{X, Y, Z, A, D, G, T, B} <: AbstractDataField{X, Y, Z, A, G, T, 3}
             data :: D
     architecture :: A
             grid :: G
@@ -29,7 +29,7 @@ julia> using Oceananigans, Oceananigans.Grids
 
 julia> ω = Field(Face, Face, Center, CPU(), RegularRectilinearGrid(size=(1, 1, 1), extent=(1, 1, 1)))
 Field located at (Face, Face, Center)
-├── data: OffsetArrays.OffsetArray{Float64,3,Array{Float64,3}}, size: (3, 3, 3)
+├── data: OffsetArrays.OffsetArray{Float64,3,Array{Float64,3}}, size: (1, 1, 1)
 ├── grid: RegularRectilinearGrid{Float64, Periodic, Periodic, Bounded}(Nx=1, Ny=1, Nz=1)
 └── boundary conditions: x=(west=Periodic, east=Periodic), y=(south=Periodic, north=Periodic), z=(bottom=ZeroFlux, top=ZeroFlux)
 ```
diff --git a/src/Fields/function_field.jl b/src/Fields/function_field.jl
index 373e815f42..e3cd935044 100644
--- a/src/Fields/function_field.jl
+++ b/src/Fields/function_field.jl
@@ -1,5 +1,5 @@
 
-struct FunctionField{X, Y, Z, C, P, F, G, T} <: AbstractField{X, Y, Z, Nothing, G, T}
+struct FunctionField{X, Y, Z, C, P, F, G, T} <: AbstractField{X, Y, Z, Nothing, G, T, 3}
      func :: F
      grid :: G
     clock :: C
diff --git a/src/Fields/kernel_computed_field.jl b/src/Fields/kernel_computed_field.jl
index 1d295aa31c..1f4aa318ec 100644
--- a/src/Fields/kernel_computed_field.jl
+++ b/src/Fields/kernel_computed_field.jl
@@ -2,7 +2,7 @@ using Oceananigans: AbstractModel
 using Oceananigans.Grids
 using Oceananigans.Utils: tupleit
 
-struct KernelComputedField{X, Y, Z, A, S, D, G, T, K, B, F, P} <: AbstractDataField{X, Y, Z, A, G, T}
+struct KernelComputedField{X, Y, Z, A, S, D, G, T, K, B, F, P} <: AbstractDataField{X, Y, Z, A, G, T, 3}
             data :: D
     architecture :: A
             grid :: G
@@ -34,10 +34,10 @@ struct KernelComputedField{X, Y, Z, A, S, D, G, T, K, B, F, P} <: AbstractDataFi
 end
 
 """
-    KernelComputedField(X, Y, Z, kernel, model;
-                        boundary_conditions = ComputedFieldBoundaryConditions(grid, (X, Y, Z)),
-                        computed_dependencies = (),
-                        parameters = nothing,
+    KernelComputedField(X, Y, Z, kernel, model;
+                        boundary_conditions = ComputedFieldBoundaryConditions(grid, (X, Y, Z)),
+                        computed_dependencies = (),
+                        parameters = nothing,
                         data = nothing,
                         recompute_safely = true)
 
 Builds a `KernelComputedField` at `X, Y, Z` computed with `kernel` and `model.architecture`.
 
 `computed_dependencies` are an iterable of `AbstractField`s or other objects on which `compute!` is called prior to launching `kernel`.
 
-`data` is a three-dimensional `OffsetArray` of scratch space where the kernel computation is stored. 
+`data` is a three-dimensional `OffsetArray` of scratch space where the kernel computation is stored.
 If `data=nothing` (the default) then additional memory will be allocated to store the `data` of `KernelComputedField`.
 
 Otherwise, `kernel` is launched with the function signature
 
 `kernel(data, grid, computed_dependencies..., parameters)`
 
-`recompute_safely` (default: `true`) determines whether the `KernelComputedField` is "recomputed" if embedded in the expression 
-tree of another operation. 
-    - If `recompute_safely=true`, the `KernelComputedField` is always recomputed. 
-    - If `recompute_safely=false`, the `KernelComputedField` will not be recomputed if its status is up-to-date. 
+`recompute_safely` (default: `true`) determines whether the `KernelComputedField` is "recomputed" if embedded in the expression
+tree of another operation.
+    - If `recompute_safely=true`, the `KernelComputedField` is always recomputed.
+    - If `recompute_safely=false`, the `KernelComputedField` will not be recomputed if its status is up-to-date.
 
 Example
 =======
@@ -111,7 +111,7 @@ function compute!(kcf::KernelComputedField{X, Y, Z}) where {X, Y, Z}
         tuple(kcf.data, kcf.grid, kcf.computed_dependencies...) :
         tuple(kcf.data, kcf.grid, kcf.computed_dependencies..., kcf.parameters)
 
-    event = launch!(arch, kcf.grid, :xyz, kcf.kernel, args...; 
+    event = launch!(arch, kcf.grid, :xyz, kcf.kernel, args...;
                     location=(X, Y, Z), include_right_boundaries=true)
 
     wait(device(arch), event)
 
diff --git a/src/Fields/reduced_field.jl b/src/Fields/reduced_field.jl
index 9e942de926..f7e9ef78b9 100644
--- a/src/Fields/reduced_field.jl
+++ b/src/Fields/reduced_field.jl
@@ -2,7 +2,11 @@ using Adapt
 
 import Oceananigans.BoundaryConditions: fill_halo_regions!
 
-abstract type AbstractReducedField{X, Y, Z, A, G, T, N} <: AbstractDataField{X, Y, Z, A, G, T} end
+#####
+##### AbstractReducedField stuff
+#####
+
+abstract type AbstractReducedField{X, Y, Z, A, G, T, N} <: AbstractDataField{X, Y, Z, A, G, T, 3} end
 
 const ARF = AbstractReducedField
 
@@ -16,7 +20,7 @@ function validate_reduced_dims(dims)
 
     # Check type
    dims isa DimsType || error("Reduced dims must be an integer or tuple of integers.")
-    
+
    # Check length
    length(dims) > 3 && error("Models are 3-dimensional. Cannot reduce over 4+ dimensions.")
 
diff --git a/src/Fields/show_fields.jl b/src/Fields/show_fields.jl
index 71cfb1d7b2..a663307046 100644
--- a/src/Fields/show_fields.jl
+++ b/src/Fields/show_fields.jl
@@ -14,14 +14,14 @@ short_show(field::AbstractField) = string(typeof(field).name.wrapper, " located
 short_show(field::AveragedField) = string("AveragedField over dims=$(field.dims) located at ", show_location(field), " of ", short_show(field.operand))
 short_show(field::ComputedField) = string("ComputedField located at ", show_location(field), " of ", short_show(field.operand))
 
-Base.show(io::IO, field::AbstractField{X, Y, Z, A, G}) where {X, Y, Z, A, G} =
+Base.show(io::IO, field::AbstractField{X, Y, Z, A}) where {X, Y, Z, A} =
     print(io, "$(short_show(field))\n",
-          "├── architecture: $A",
-          "└── grid: $G")
+          "├── architecture: $A\n",
+          "└── grid: $(short_show(field.grid))")
 
 Base.show(io::IO, field::Field) =
     print(io, "$(short_show(field))\n",
-          "├── data: $(typeof(field.data)), size: $(size(field.data))\n",
+          "├── data: $(typeof(field.data)), size: $(size(field))\n",
           "├── grid: $(short_show(field.grid))\n",
           "└── boundary conditions: $(short_show(field.boundary_conditions))")
 
@@ -30,26 +30,26 @@ show_status(status) = "time=$(status.time)"
 
 Base.show(io::IO, field::AveragedField) =
     print(io, "$(short_show(field))\n",
-          "├── data: $(typeof(field.data)), size: $(size(field.data))\n",
-          "├── grid: $(short_show(field.grid))", '\n',
-          "├── dims: $(field.dims)", '\n',
-          "├── operand: $(short_show(field.operand))", '\n',
-          "└── status: ", show_status(field.status), '\n')
+          "├── data: $(typeof(field.data)), size: $(size(field))\n",
+          "├── grid: $(short_show(field.grid))\n",
+          "├── dims: $(field.dims)\n",
+          "├── operand: $(short_show(field.operand))\n",
+          "└── status: ", show_status(field.status))
 
 Base.show(io::IO, field::ComputedField) =
     print(io, "$(short_show(field))\n",
-          "├── data: $(typeof(field.data)), size: $(size(field.data))\n",
-          "├── grid: $(short_show(field.grid))", '\n',
-          "├── operand: $(short_show(field.operand))", '\n',
-          "└── status: ", show_status(field.status), '\n')
+          "├── data: $(typeof(field.data)), size: $(size(field))\n",
+          "├── grid: $(short_show(field.grid))\n",
+          "├── operand: $(short_show(field.operand))\n",
+          "└── status: $(show_status(field.status))")
 
 Base.show(io::IO, field::KernelComputedField) =
     print(io, "$(short_show(field))\n",
-          "├── data: $(typeof(field.data)), size: $(size(field.data))\n",
-          "├── grid: $(short_show(field.grid))", '\n',
-          "├── computed_dependencies: $(Tuple(short_show(d) for d in field.computed_dependencies))", '\n',
-          "├── kernel: $(short_show(field.kernel))", '\n',
-          "└── status: ", show_status(field.status), '\n')
+          "├── data: $(typeof(field.data)), size: $(size(field))\n",
+          "├── grid: $(short_show(field.grid))\n",
+          "├── computed_dependencies: $(Tuple(short_show(d) for d in field.computed_dependencies))\n",
+          "├── kernel: $(short_show(field.kernel))\n",
+          "└── status: $(show_status(field.status))")
 
 short_show(array::OffsetArray{T, D, A}) where {T, D, A} = string("OffsetArray{$T, $D, $A}")
 
diff --git a/src/Fields/zero_field.jl b/src/Fields/zero_field.jl
index d8ca8c8d78..cc6561e149 100644
--- a/src/Fields/zero_field.jl
+++ b/src/Fields/zero_field.jl
@@ -1,3 +1,3 @@
-struct ZeroField <: AbstractField{Nothing, Nothing, Nothing, Nothing, Nothing, Nothing} end
+struct ZeroField <: AbstractField{Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, 3} end
 
 @inline Base.getindex(::ZeroField, i, j, k) = 0
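A note on the `, 3` and `, N` type parameters added throughout the structs above: parameterizing `AbstractField` on its number of dimensions `N` is what lets a four-dimensional time series (introduced further down in this patch) reuse Julia's `AbstractArray` machinery alongside the ordinary three-dimensional fields. A minimal sketch of the pattern, using hypothetical toy types rather than the actual Oceananigans structs:

```julia
# Sketch only: toy types illustrating why AbstractField gained an `N` parameter.
abstract type ToyField{T, N} <: AbstractArray{T, N} end

struct Toy3DField{T} <: ToyField{T, 3}    # like Field: always three-dimensional
    data :: Array{T, 3}
end

struct Toy4DSeries{T} <: ToyField{T, 4}   # like FieldTimeSeries: (x, y, z, time)
    data :: Array{T, 4}
end

Base.size(f::ToyField) = size(f.data)
Base.getindex(f::ToyField, inds...) = getindex(f.data, inds...)

f = Toy3DField(rand(4, 4, 4))
s = Toy4DSeries(rand(4, 4, 4, 10))

ndims(f), ndims(s)  # (3, 4): both types flow through AbstractArray{T, N} dispatch
```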
diff --git a/src/Oceananigans.jl b/src/Oceananigans.jl
index 31ec95ce09..5f7392259a 100644
--- a/src/Oceananigans.jl
+++ b/src/Oceananigans.jl
@@ -42,6 +42,7 @@ export
     # BuoyancyModels and equations of state
     Buoyancy, BuoyancyTracer, SeawaterBuoyancy, LinearEquationOfState,
     RoquetIdealizedNonlinearEquationOfState, TEOS10,
+    BuoyancyField,
 
     # Surface wave Stokes drift via Craik-Leibovich equations
     UniformStokesDrift,
@@ -78,6 +79,9 @@ export
     FieldSlicer, NetCDFOutputWriter, JLD2OutputWriter, Checkpointer,
     TimeInterval, IterationInterval, AveragedTimeInterval,
 
+    # Output readers
+    FieldTimeSeries, FieldDataset, InMemory, OnDisk,
+
     # Abstract operations
     ∂x, ∂y, ∂z, @at,
 
@@ -181,6 +185,7 @@ include("Models/Models.jl")
 # Output and Physics, time-stepping, and models
 include("Diagnostics/Diagnostics.jl")
 include("OutputWriters/OutputWriters.jl")
+include("OutputReaders/OutputReaders.jl")
 include("Simulations/Simulations.jl")
 
 # Abstractions for distributed and multi-region models
@@ -209,6 +214,7 @@ using .Models
 using .TimeSteppers
 using .Diagnostics
 using .OutputWriters
+using .OutputReaders
 using .Simulations
 using .AbstractOperations
 using .CubedSpheres
diff --git a/src/OutputReaders/OutputReaders.jl b/src/OutputReaders/OutputReaders.jl
new file mode 100644
index 0000000000..820f8f5430
--- /dev/null
+++ b/src/OutputReaders/OutputReaders.jl
@@ -0,0 +1,15 @@
+module OutputReaders
+
+export InMemory, OnDisk
+export FieldTimeSeries, FieldDataset
+
+abstract type AbstractDataBackend end
+
+struct InMemory <: AbstractDataBackend end
+struct OnDisk <: AbstractDataBackend end
+
+include("output_reader_utils.jl")
+include("field_time_series.jl")
+include("field_dataset.jl")
+
+end # module
diff --git a/src/OutputReaders/field_dataset.jl b/src/OutputReaders/field_dataset.jl
new file mode 100644
index 0000000000..bcbb2af530
--- /dev/null
+++ b/src/OutputReaders/field_dataset.jl
@@ -0,0 +1,23 @@
+"""
+    FieldDataset(filepath; architecture=CPU(), backend=InMemory())
+
+Returns a `Dict` containing a `FieldTimeSeries` for each field in the JLD2 file located at `filepath`.
+Note that model output must have been saved with halos. The `InMemory` backend will store the data
+fully in memory as a 4D multi-dimensional array while the `OnDisk` backend will lazily load field time
+snapshots when the `FieldTimeSeries` is indexed linearly.
+"""
+function FieldDataset(filepath; architecture=CPU(), backend=InMemory())
+    file = jldopen(filepath)
+
+    field_names = keys(file["timeseries"])
+    filter!(k -> k != "t", field_names)  # Time is not a field.
+
+    ds = Dict{String, FieldTimeSeries}(
+        name => FieldTimeSeries(filepath, name; architecture, backend)
+        for name in field_names
+    )
+
+    close(file)
+
+    return ds
+end
diff --git a/src/OutputReaders/field_time_series.jl b/src/OutputReaders/field_time_series.jl
new file mode 100644
index 0000000000..4fccfe7369
--- /dev/null
+++ b/src/OutputReaders/field_time_series.jl
@@ -0,0 +1,113 @@
+using Base: @propagate_inbounds
+
+using OffsetArrays
+using JLD2
+
+using Oceananigans.Architectures
+using Oceananigans.Grids
+using Oceananigans.Fields
+using Oceananigans.Fields: show_location
+
+import Oceananigans: short_show
+
+struct FieldTimeSeries{X, Y, Z, K, A, T, N, D, G, B, χ} <: AbstractDataField{X, Y, Z, A, G, T, N}
+                   data :: D
+           architecture :: A
+                   grid :: G
+    boundary_conditions :: B
+                  times :: χ
+                   name :: String
+               filepath :: String
+
+    function FieldTimeSeries{X, Y, Z}(backend::K, data::D, arch::A, grid::G, bcs::B, times::χ, name, filepath, N) where {X, Y, Z, K, D, A, G, B, χ}
+        T = eltype(grid)
+        return new{X, Y, Z, K, A, T, N, D, G, B, χ}(data, arch, grid, bcs, times, name, filepath)
+    end
+end
+
+# Include the time dimension.
+@inline Base.size(fts::FieldTimeSeries) = (size(location(fts), fts.grid)..., length(fts.times))
+
+@propagate_inbounds Base.getindex(f::FieldTimeSeries{X, Y, Z, InMemory}, i, j, k, n) where {X, Y, Z} = f.data[i, j, k, n]
+
+"""
+    FieldTimeSeries(filepath, name; architecture=CPU(), backend=InMemory())
+
+Returns a `FieldTimeSeries` for the field `name` describing a field's time history from a JLD2 file
+located at `filepath`. Note that model output must have been saved with halos. The `InMemory` backend
+will store the data fully in memory as a 4D multi-dimensional array while the `OnDisk` backend will
+lazily load field time snapshots when the `FieldTimeSeries` is indexed linearly.
+""" +FieldTimeSeries(filepath, name; architecture=CPU(), backend=InMemory()) = + FieldTimeSeries(filepath, name, architecture, backend) + +function FieldTimeSeries(filepath, name, architecture, backend::InMemory) + file = jldopen(filepath) + + grid = file["serialized/grid"] + Hx, Hy, Hz = halo_size(grid) + + iterations = parse.(Int, keys(file["timeseries/t"])) + times = [file["timeseries/t/$i"] for i in iterations] + + LX, LY, LZ = location = file["timeseries/$name/serialized/location"] + + Nt = length(times) + data_size = size(file["timeseries/$name/0"]) + + ArrayType = array_type(architecture) + raw_data = zeros(data_size..., Nt) |> ArrayType + data = offset_data(raw_data, grid, location) + + for (n, iter) in enumerate(iterations) + data.parent[:, :, :, n] .= file["timeseries/$name/$iter"] |> ArrayType + end + + bcs = file["timeseries/$name/serialized/boundary_conditions"] + + close(file) + + return FieldTimeSeries{LX, LY, LZ}(backend, data, architecture, grid, bcs, times, name, abspath(filepath), ndims(data)) +end + +function FieldTimeSeries(filepath, name, architecture, backend::OnDisk) + file = jldopen(filepath) + + grid = file["serialized/grid"] + iterations = parse.(Int, keys(file["timeseries/t"])) + times = [file["timeseries/t/$i"] for i in iterations] + + data = nothing + LX, LY, LZ = file["timeseries/$name/serialized/location"] + bcs = file["timeseries/$name/serialized/boundary_conditions"] + + close(file) + + return FieldTimeSeries{LX, LY, LZ}(backend, data, architecture, grid, bcs, times, name, abspath(filepath), 4) +end + +Base.getindex(fts::FieldTimeSeries{X, Y, Z, InMemory}, n::Int) where {X, Y, Z} = + Field((X, Y, Z), fts.architecture, fts.grid, fts.boundary_conditions, fts.data[:, :, :, n]) + +function Base.getindex(fts::FieldTimeSeries{X, Y, Z, OnDisk}, n::Int) where {X, Y, Z} + file = jldopen(fts.filepath) + iter = keys(file["timeseries/t"])[n] + raw_data = file["timeseries/$(fts.name)/$iter"] |> array_type(fts.architecture) + close(file) + + loc = (X, Y, Z) + field_data = offset_data(raw_data, fts.grid, loc) + return Field(loc, fts.architecture, fts.grid, fts.boundary_conditions, field_data) +end + +backend_str(::InMemory) = "InMemory" +backend_str(::OnDisk) = "OnDisk" + +short_show(fts::FieldTimeSeries{X, Y, Z, K}) where {X, Y, Z, K} = + string("$(join(size(fts), "×")) FieldTimeSeries{$(backend_str(K()))} located at $(show_location(fts))") + +Base.show(io::IO, fts::FieldTimeSeries{X, Y, Z, K, A}) where {X, Y, Z, K, A} = + print(io, "$(short_show(fts))\n", + "├── filepath: $(fts.filepath)\n", + "├── architecture: $A\n", + "└── grid: $(short_show(fts.grid))") diff --git a/src/OutputReaders/output_reader_utils.jl b/src/OutputReaders/output_reader_utils.jl new file mode 100644 index 0000000000..61565296b6 --- /dev/null +++ b/src/OutputReaders/output_reader_utils.jl @@ -0,0 +1,12 @@ +using Oceananigans.Grids: offset_indices + +import Oceananigans.Grids: offset_data + +function offset_data(underlying_data::AbstractArray{FT, 4}, loc, topo, N, H) where FT + ii = offset_indices(loc[1], topo[1], N[1], H[1]) + jj = offset_indices(loc[2], topo[2], N[2], H[2]) + kk = offset_indices(loc[3], topo[3], N[3], H[3]) + nn = axes(underlying_data, 4) + + return OffsetArray(underlying_data, ii, jj, kk, nn) +end diff --git a/src/OutputWriters/jld2_output_writer.jl b/src/OutputWriters/jld2_output_writer.jl index 1f92069c73..9a0a1ec699 100644 --- a/src/OutputWriters/jld2_output_writer.jl +++ b/src/OutputWriters/jld2_output_writer.jl @@ -3,6 +3,8 @@ using JLD2 using Oceananigans.Utils using 
Oceananigans.Utils: TimeInterval, pretty_filesize +using Oceananigans.Fields: boundary_conditions + """ JLD2OutputWriter{I, T, O, IF, IN, KW} <: AbstractOutputWriter @@ -52,14 +54,14 @@ Keyword arguments ## Filenaming - `prefix` (required): Descriptive filename prefixed to all output files. - + - `dir`: Directory to save output to. Default: "." (current working directory). ## Output frequency and time-averaging - + - `schedule` (required): `AbstractSchedule` that determines when output is saved. - + ## Slicing and type conversion prior to output - `field_slicer`: An object for slicing field output in ``(x, y, z)``, including omitting halos. @@ -75,15 +77,15 @@ Keyword arguments - `max_filesize`: The writer will stop writing to the output file once the file size exceeds `max_filesize`, and write to a new one with a consistent naming scheme ending in `part1`, `part2`, etc. Defaults to `Inf`. - + - `force`: Remove existing files if their filenames conflict. Default: `false`. ## Output file metadata management - + - `init`: A function of the form `init(file, model)` that runs when a JLD2 output file is initialized. Default: `noinit(args...) = nothing`. - + - `including`: List of model properties to save with every file. Default: `[:grid, :coriolis, :buoyancy, :closure]` @@ -91,10 +93,10 @@ Keyword arguments - `verbose`: Log what the output writer is doing with statistics on compute/write times and file sizes. Default: `false`. - + - `part`: The starting part number used if `max_filesize` is finite. Default: 1. - + - `jld2_kw`: Dict of kwargs to be passed to `jldopen` when data is written. Example @@ -166,7 +168,7 @@ function JLD2OutputWriter(model, outputs; prefix, schedule, # Convert each output to WindowedTimeAverage if schedule::AveragedTimeWindow is specified schedule, outputs = time_average_outputs(schedule, outputs, model, field_slicer) - + mkpath(dir) filepath = joinpath(dir, prefix * ".jld2") force && isfile(filepath) && rm(filepath, force=true) @@ -175,6 +177,17 @@ function JLD2OutputWriter(model, outputs; prefix, schedule, jldopen(filepath, "a+"; jld2_kw...) do file init(file, model) saveproperties!(file, model, including) + + # Serialize properties in `including`. + for property in including + file["serialized/$property"] = getproperty(model, property) + end + + # Serialize the location and boundary conditions of each output. + for (i, (field_name, field)) in enumerate(pairs(outputs)) + file["timeseries/$field_name/serialized/location"] = location(field) + file["timeseries/$field_name/serialized/boundary_conditions"] = boundary_conditions(field) + end end catch @warn "Could not initialize $filepath: data may already be initialized." 
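For orientation, the writer-side changes above and the readers below are two halves of one workflow: output must be written with halos (`field_slicer = FieldSlicer(with_halos=true)`), and the serialized location and boundary conditions added in `jld2_output_writer.jl` are what the readers use to rebuild `Field`s. A sketch of the reading side, assuming a file `"simulation_output.jld2"` containing a field named `"u"` was produced that way (both names are illustrative):

```julia
using Oceananigans

# In-memory backend: the full (x, y, z, time) history is loaded into a 4D array.
u_series = FieldTimeSeries("simulation_output.jld2", "u")

u_series.times        # times of the saved snapshots
u_series[3]           # third snapshot, rebuilt as a Field on the saved grid
u_series[4, 4, 2, 3]  # point value at the third snapshot

# On-disk backend: snapshots are read lazily, one Field at a time, when indexed.
u_lazy = FieldTimeSeries("simulation_output.jld2", "u", backend=OnDisk())
u_lazy[1]             # reads only the first snapshot from the JLD2 file
```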
diff --git a/test/runtests.jl b/test/runtests.jl
index b3bd597c95..c193ae175d 100644
--- a/test/runtests.jl
+++ b/test/runtests.jl
@@ -145,6 +145,7 @@ group = get(ENV, "TEST_GROUP", :all) |> Symbol
         include("test_simulations.jl")
         include("test_diagnostics.jl")
         include("test_output_writers.jl")
+        include("test_output_readers.jl")
         include("test_lagrangian_particle_tracking.jl")
     end
 end
diff --git a/test/test_field.jl b/test/test_field.jl
index fb5e702b3e..e118dc2657 100644
--- a/test/test_field.jl
+++ b/test/test_field.jl
@@ -38,7 +38,7 @@ function run_field_reduction_tests(FT, arch)
     w = ZFaceField(arch, grid)
     c = CenterField(arch, grid)
 
-    f(x, y, z) = exp(x) * sin(y) * tanh(z)
+    f(x, y, z) = 1 + exp(x) * sin(y) * tanh(z)
 
     ϕs = (u, v, w, c)
     [set!(ϕ, f) for ϕ in ϕs]
@@ -60,26 +60,30 @@
 
     for (ϕ, ϕ_vals) in zip(ϕs, ϕs_vals)
 
-        @test all(ϕ .== ϕ_vals) # if this isn't true, reduction tests can't pass
+        ε = eps(maximum(ϕ_vals))
+
+        @test all(isapprox.(ϕ, ϕ_vals, atol=ε)) # if this isn't true, reduction tests can't pass
 
         # Important to make sure no CUDA scalar operations occur!
         CUDA.allowscalar(false)
-        @test minimum(ϕ) == minimum(ϕ_vals)
-        @test maximum(ϕ) == maximum(ϕ_vals)
-        @test mean(ϕ) == mean(ϕ_vals)
-        @test minimum(∛, ϕ) == minimum(∛, ϕ_vals)
-        @test maximum(abs, ϕ) == maximum(abs, ϕ_vals)
-        @test mean(abs2, ϕ) == mean(abs2, ϕ)
+
+        @test minimum(ϕ) ≈ minimum(ϕ_vals) atol=ε
+        @test maximum(ϕ) ≈ maximum(ϕ_vals) atol=ε
+        @test mean(ϕ) ≈ mean(ϕ_vals) atol=2ε
+        @test minimum(∛, ϕ) ≈ minimum(∛, ϕ_vals) atol=ε
+        @test maximum(abs, ϕ) ≈ maximum(abs, ϕ_vals) atol=ε
+        @test mean(abs2, ϕ) ≈ mean(abs2, ϕ_vals) atol=ε
 
         for dims in dims_to_test
-            @test minimum(ϕ, dims=dims) == minimum(ϕ_vals, dims=dims)
-            @test maximum(ϕ, dims=dims) == maximum(ϕ_vals, dims=dims)
-            @test mean(ϕ, dims=dims) == mean(ϕ_vals, dims=dims)
+            @test all(isapprox(minimum(ϕ, dims=dims), minimum(ϕ_vals, dims=dims), atol=2ε))
+            @test all(isapprox(maximum(ϕ, dims=dims), maximum(ϕ_vals, dims=dims), atol=4ε))
+            @test all(isapprox(   mean(ϕ, dims=dims),    mean(ϕ_vals, dims=dims), atol=4ε))
 
-            @test minimum(sin, ϕ, dims=dims) == minimum(sin, ϕ_vals, dims=dims)
-            @test maximum(cos, ϕ, dims=dims) == maximum(cos, ϕ_vals, dims=dims)
-            @test mean(cosh, ϕ, dims=dims) == mean(cosh, ϕ, dims=dims)
+            @test all(isapprox(minimum(sin, ϕ, dims=dims), minimum(sin, ϕ_vals, dims=dims), atol=2ε))
+            @test all(isapprox(maximum(cos, ϕ, dims=dims), maximum(cos, ϕ_vals, dims=dims), atol=2ε))
+            @test all(isapprox(  mean(cosh, ϕ, dims=dims),   mean(cosh, ϕ_vals, dims=dims), atol=5ε))
         end
+
         CUDA.allowscalar(true)
     end
diff --git a/test/test_output_readers.jl b/test/test_output_readers.jl
new file mode 100644
index 0000000000..6f219f2f1d
--- /dev/null
+++ b/test/test_output_readers.jl
@@ -0,0 +1,198 @@
+using Test
+using Statistics
+using JLD2
+
+using Oceananigans
+using Oceananigans.Architectures: array_type
+
+using Oceananigans.Fields: location
+
+function generate_some_interesting_simulation_data(Nx, Ny, Nz; architecture=CPU())
+    grid = RegularRectilinearGrid(size=(Nx, Ny, Nz), extent=(64, 64, 32))
+
+    T_bcs = TracerBoundaryConditions(grid, top = FluxBoundaryCondition(5e-5), bottom = GradientBoundaryCondition(0.01))
+    u_bcs = UVelocityBoundaryConditions(grid, top = FluxBoundaryCondition(-3e-4))
+
+    @inline Qˢ(x, y, t, S, evaporation_rate) = - evaporation_rate * S
+    evaporation_bc = FluxBoundaryCondition(Qˢ, field_dependencies=:S, parameters=3e-7)
+    S_bcs = TracerBoundaryConditions(grid, top=evaporation_bc)
+
+    model = IncompressibleModel(
+        architecture = architecture,
+        grid = grid,
+        boundary_conditions = (u=u_bcs, T=T_bcs, S=S_bcs)
+    )
+
+    dTdz = 0.01
+    Tᵢ(x, y, z) = 20 + dTdz * z + 1e-6 * randn()
+    uᵢ(x, y, z) = 1e-3 * randn()
+    set!(model, u=uᵢ, w=uᵢ, T=Tᵢ, S=35)
+
+    wizard = TimeStepWizard(cfl=1.0, Δt=10.0, max_change=1.1, max_Δt=1minute)
+    simulation = Simulation(model, Δt=wizard, stop_time=2minutes)
+
+    u, v, w = model.velocities
+
+    computed_fields = (
+        b = BuoyancyField(model),
+        ζ = ComputedField(∂x(v) - ∂y(u)),
+        ke = ComputedField(√(u^2 + v^2))
+    )
+
+    fields_to_output = merge(model.velocities, model.tracers, computed_fields)
+
+    simulation.output_writers[:jld2_3d_with_halos] =
+        JLD2OutputWriter(model, fields_to_output,
+                         prefix = "test_3d_output_with_halos",
+                         field_slicer = FieldSlicer(with_halos=true),
+                         schedule = TimeInterval(30seconds),
+                         force = true)
+
+    profiles = NamedTuple{keys(fields_to_output)}(AveragedField(f, dims=(1, 2)) for f in fields_to_output)
+
+    simulation.output_writers[:jld2_1d_with_halos] =
+        JLD2OutputWriter(model, profiles,
+                         prefix = "test_1d_output_with_halos",
+                         field_slicer = FieldSlicer(with_halos=true),
+                         schedule = TimeInterval(30seconds),
+                         force = true)
+
+    run!(simulation)
+
+    return nothing
+end
+
+@testset "OutputReaders" begin
+    @info "Testing output readers..."
+
+    Nx, Ny, Nz = 16, 10, 5
+    generate_some_interesting_simulation_data(Nx, Ny, Nz)
+    Nt = 5
+
+    filepath3d = "test_3d_output_with_halos.jld2"
+    filepath1d = "test_1d_output_with_halos.jld2"
+
+    for arch in archs
+        @testset "FieldTimeSeries{InMemory} [$(typeof(arch))]" begin
+            @info "  Testing FieldTimeSeries{InMemory} [$(typeof(arch))]..."
+
+            ## 3D Fields
+
+            u3 = FieldTimeSeries(filepath3d, "u", architecture=arch)
+            v3 = FieldTimeSeries(filepath3d, "v", architecture=arch)
+            w3 = FieldTimeSeries(filepath3d, "w", architecture=arch)
+            T3 = FieldTimeSeries(filepath3d, "T", architecture=arch)
+            b3 = FieldTimeSeries(filepath3d, "b", architecture=arch)
+            ζ3 = FieldTimeSeries(filepath3d, "ζ", architecture=arch)
+
+            @test location(u3) == (Face, Center, Center)
+            @test location(v3) == (Center, Face, Center)
+            @test location(w3) == (Center, Center, Face)
+            @test location(T3) == (Center, Center, Center)
+            @test location(b3) == (Center, Center, Center)
+            @test location(ζ3) == (Face, Face, Center)
+
+            @test size(u3) == (Nx, Ny, Nz, Nt)
+            @test size(v3) == (Nx, Ny, Nz, Nt)
+            @test size(w3) == (Nx, Ny, Nz+1, Nt)
+            @test size(T3) == (Nx, Ny, Nz, Nt)
+            @test size(b3) == (Nx, Ny, Nz, Nt)
+            @test size(ζ3) == (Nx, Ny, Nz, Nt)
+
+            ArrayType = array_type(arch)
+            for fts in (u3, v3, w3, T3, b3, ζ3)
+                @test fts.data.parent isa ArrayType
+            end
+
+            @test u3[1, 2, 3, 4] isa Number
+            @test u3[1] isa Field
+            @test v3[2] isa Field
+
+            ## 1D AveragedFields
+
+            u1 = FieldTimeSeries(filepath1d, "u", architecture=arch)
+            v1 = FieldTimeSeries(filepath1d, "v", architecture=arch)
+            w1 = FieldTimeSeries(filepath1d, "w", architecture=arch)
+            T1 = FieldTimeSeries(filepath1d, "T", architecture=arch)
+            b1 = FieldTimeSeries(filepath1d, "b", architecture=arch)
+            ζ1 = FieldTimeSeries(filepath1d, "ζ", architecture=arch)
+
+            @test location(u1) == (Nothing, Nothing, Center)
+            @test location(v1) == (Nothing, Nothing, Center)
+            @test location(w1) == (Nothing, Nothing, Face)
+            @test location(T1) == (Nothing, Nothing, Center)
+            @test location(b1) == (Nothing, Nothing, Center)
+            @test location(ζ1) == (Nothing, Nothing, Center)
+
+            @test size(u1) == (1, 1, Nz, Nt)
+            @test size(v1) == (1, 1, Nz, Nt)
+            @test size(w1) == (1, 1, Nz+1, Nt)
+            @test size(T1) == (1, 1, Nz, Nt)
+            @test size(b1) == (1, 1, Nz, Nt)
+            @test size(ζ1) == (1, 1, Nz, Nt)
+
+            for fts in (u1, v1, w1, T1, b1, ζ1)
+                @test fts.data.parent isa ArrayType
+            end
+
+            @test u1[1, 1, 3, 4] isa Number
+            @test u1[1] isa Field
+            @test v1[2] isa Field
+        end
+    end
+
+    for arch in archs
+        @testset "FieldTimeSeries{OnDisk} [$(typeof(arch))]" begin
+            @info "  Testing FieldTimeSeries{OnDisk} [$(typeof(arch))]..."
+
+            ArrayType = array_type(arch)
+
+            ζ = FieldTimeSeries(filepath3d, "ζ", backend=OnDisk(), architecture=arch)
+            @test location(ζ) == (Face, Face, Center)
+            @test size(ζ) == (Nx, Ny, Nz, Nt)
+            @test ζ[1] isa Field
+            @test ζ[2] isa Field
+            @test ζ[1].data.parent isa ArrayType
+
+            b = FieldTimeSeries(filepath1d, "b", backend=OnDisk(), architecture=arch)
+            @test location(b) == (Nothing, Nothing, Center)
+            @test size(b) == (1, 1, Nz, Nt)
+            @test b[1] isa Field
+            @test b[2] isa Field
+        end
+    end
+
+    for arch in archs
+        @testset "FieldTimeSeries{InMemory} reductions" begin
+            @info "  Testing FieldTimeSeries{InMemory} reductions..."
+
+            for name in ("u", "v", "w", "T", "b", "ζ"), fun in (sum, mean, maximum, minimum)
+                f = FieldTimeSeries(filepath3d, name, architecture=CPU())
+
+                ε = eps(maximum(abs, f.data.parent))
+
+                val1 = fun(f)
+                val2 = fun([fun(f[n]) for n in 1:Nt])
+
+                @test val1 ≈ val2 atol=ε
+            end
+        end
+    end
+
+    for Backend in [InMemory, OnDisk]
+        @testset "FieldDataset{$Backend}" begin
+            @info "  Testing FieldDataset{$Backend}..."
+
+            ds = FieldDataset(filepath3d, backend=Backend())
+
+            @test ds isa Dict
+            @test length(keys(ds)) == 8
+            @test ds["u"] isa FieldTimeSeries
+            @test ds["v"][1] isa Field
+            @test ds["T"][2] isa Field
+        end
+    end
+
+    rm("test_3d_output_with_halos.jld2")
+    rm("test_1d_output_with_halos.jld2")
+end
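As a closing usage note, the tests above exercise the reader API in essentially this shape; the sketch below uses the same file and field names the test writes (on a real setup they are whatever the output writer was given):

```julia
using Oceananigans
using Statistics

# Every field saved in the file, as a Dict of FieldTimeSeries keyed by name.
ds = FieldDataset("test_3d_output_with_halos.jld2")

ζ_series = ds["ζ"]        # a FieldTimeSeries
size(ζ_series)            # (Nx, Ny, Nz, Nt), with time as the fourth dimension
ζ_snapshot = ζ_series[1]  # first saved snapshot, rebuilt as a Field
mean(ζ_snapshot)          # ordinary reductions apply to the returned Field
```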