Skip to content

Commit

Permalink
Merge pull request #272 from Deltares/sum_scalar_output
Browse files Browse the repository at this point in the history
Add `sum` to reducer options
  • Loading branch information
JoostBuitink committed Jun 12, 2023
2 parents b7fca5c + 67bac80 commit 0b923a8
Show file tree
Hide file tree
Showing 4 changed files with 20 additions and 15 deletions.
3 changes: 2 additions & 1 deletion docs/src/changelog.md
Original file line number Diff line number Diff line change
Expand Up @@ -12,7 +12,7 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0
functions, except `BMI.initialize`. Also it returns "s" instead of "seconds since
1970-01-01T00:00:00", in line with the BMI specification.
- Added the `interception` component to total actual evapotranspiration `actevap` of `SBM`
(was defined as the sum of soil evaporation, transpiration and open water evaporation).
(was defined as the sum of soil evaporation, transpiration and open water evaporation).

### Changed
- The time values returned in the BMI interface are no longer in seconds since 1970, but in
Expand All @@ -35,6 +35,7 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0
- For (regulated) lakes with rating curve of type 1 (H-Q table), lake `storage` above the
`maximumstorage` (based on maximum water level from the H-Q table) is spilled
instantaneously (overflow) from the lake.
- Added support to use `sum` as a reducer function for csv and scalar output options.

## v0.6.3 - 2023-03-01

Expand Down
1 change: 1 addition & 0 deletions docs/src/user_guide/step2_settings_file.md
Original file line number Diff line number Diff line change
Expand Up @@ -286,6 +286,7 @@ with the following available reducers:
+ minimum
+ mean
+ median
+ sum
+ first
+ last
+ only
Expand Down
1 change: 1 addition & 0 deletions src/io.jl
Original file line number Diff line number Diff line change
Expand Up @@ -1214,6 +1214,7 @@ function reducerfunction(reducer::AbstractString)
"first" => first,
"last" => last,
"only" => only,
"sum" => sum,
)
f = get(functionmap, reducer, nothing)
isnothing(f) && error("unknown reducer")
Expand Down
30 changes: 16 additions & 14 deletions test/io.jl
Original file line number Diff line number Diff line change
Expand Up @@ -33,7 +33,7 @@ config = Wflow.Config(tomlpath)
# modifiers can also be applied
kvconf = Wflow.get_alias(config.input.vertical, "kv_0", "kv₀", nothing)
@test kvconf isa Wflow.Config
ncname, modifier = Wflow.ncvar_name_modifier(kvconf, config = config)
ncname, modifier = Wflow.ncvar_name_modifier(kvconf, config=config)
@test ncname === "KsatVer"
@test modifier.scale == 1.0
@test modifier.offset == 0.0
Expand Down Expand Up @@ -63,7 +63,7 @@ end
# mock a NCReader object
ncpath = Wflow.input_path(config, config.input.path_forcing)
ds = NCDataset(ncpath)
reader = (; dataset = ds)
reader = (; dataset=ds)

# if these keys are missing, they are derived from the NetCDF
pop!(Dict(config), "starttime")
Expand Down Expand Up @@ -232,10 +232,12 @@ end
@testset "reducer" begin
    # Each named reducer applied to the sample vector must yield its known value.
    sample = [6, 5, 4, 1]
    cases = [
        ("maximum", 6),
        ("minimum", 1),
        ("mean", 4),
        ("median", 4.5),
        ("first", 6),
        ("last", 1),
        ("sum", 16),
    ]
    for (name, expected) in cases
        @test Wflow.reducerfunction(name)(sample) == expected
    end
    # An unrecognized reducer name must raise an error.
    @test_throws ErrorException Wflow.reducerfunction("other")
end

Expand Down Expand Up @@ -290,7 +292,7 @@ Wflow.load_dynamic_input!(model)
]
end

Wflow.close_files(model, delete_output = false)
Wflow.close_files(model, delete_output=false)

@testset "NetCDF creation" begin
path = Base.Filesystem.tempname()
Expand Down Expand Up @@ -319,17 +321,17 @@ end
@test Wflow.internal_dim_name(:latitude) == :y
@test Wflow.internal_dim_name(:time) == :time

@test_throws ArgumentError Wflow.read_dims(ds["c"], (x = :, y = :))
@test_throws ArgumentError Wflow.read_dims(ds["LAI"], (x = :, y = :))
data, data_dim_order = Wflow.read_dims(ds["wflow_dem"], (x = :, y = :))
@test_throws ArgumentError Wflow.read_dims(ds["c"], (x=:, y=:))
@test_throws ArgumentError Wflow.read_dims(ds["LAI"], (x=:, y=:))
data, data_dim_order = Wflow.read_dims(ds["wflow_dem"], (x=:, y=:))
@test data isa Matrix{Union{Float32,Missing}}
@test data[end, end] === missing
@test data[125, 1] ≈ 647.187f0
@test data_dim_order == (:x, :y)

@test Wflow.dim_directions(ds, (:x, :y)) === (x = true, y = false)
@test Wflow.dim_directions(ds, (:x, :y)) === (x=true, y=false)
@test Wflow.dim_directions(ds, (:y, :x, :layer)) ===
(y = false, x = true, layer = true)
(y=false, x=true, layer=true)

data, dims = Wflow.permute_data(zeros(1, 2, 3), (:layer, :y, :x))
@test size(data) == (3, 2, 1)
Expand All @@ -341,17 +343,17 @@ end

data = collect(reshape(1:6, (2, 3)))
# flip y, which is the second dimension
@test Wflow.reverse_data!(data, (y = false, x = true))[1, :] == [5, 3, 1]
@test Wflow.reverse_data!(data, (y=false, x=true))[1, :] == [5, 3, 1]
# and mutate it back, the NamedTuple order should not matter
@test Wflow.reverse_data!(data, (x = true, y = false))[1, :] == [1, 3, 5]
@test Wflow.reverse_data!(data, (x=true, y=false))[1, :] == [1, 3, 5]
# flip both dimensions at the same time
data = Wflow.reverse_data!(data, (x = false, y = false))
data = Wflow.reverse_data!(data, (x=false, y=false))
@test data[1, :] == [6, 4, 2]
@test data[:, 1] == [6, 5]

data = Wflow.read_standardized(ds, "wflow_dem", (x = :, y = :))
data = Wflow.read_standardized(ds, "wflow_dem", (x=:, y=:))
# since in this case only the second dimension needs reversing, we can easily do it manually
manual_fix = reverse(ds["wflow_dem"]; dims = 2)
manual_fix = reverse(ds["wflow_dem"]; dims=2)
@test all(data .=== manual_fix)
end
end
Expand All @@ -362,7 +364,7 @@ end
@test Wflow.parse_loglevel(0) == Logging.Info

tomlpath = joinpath(@__DIR__, "sbm_simple.toml")
Wflow.run(tomlpath; silent = true)
Wflow.run(tomlpath; silent=true)

config = Wflow.Config(tomlpath)
output = normpath(abspath(Wflow.get(config, "dir_output", ".")))
Expand Down

0 comments on commit 0b923a8

Please sign in to comment.