add support for dir_input and dir_output #175

Merged: 1 commit, merged on Nov 23, 2021
6 changes: 6 additions & 0 deletions docs/src/changelog.md
@@ -5,6 +5,12 @@ All notable changes to this project will be documented in this file.
The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/),
and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html).

## [Unreleased]

### Added
- Optional `dir_input` and `dir_output` keys in the TOML, which can be used to quickly
  change the base path for all input or output files that are given as relative paths.

## v0.5.0 - 2021-11-12

### Changed
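
To illustrate the new `dir_input` and `dir_output` keys in practice, here is a minimal
sketch of the intended path resolution (the project layout and file names below are
hypothetical):

```julia
# Assume the TOML lives at /project/config.toml and sets
# dir_input = "data/input" and dir_output = "data/output".
toml_dir = "/project"

# an input entry given as path_static = "staticmaps.nc" resolves to
normpath(toml_dir, "data/input", "staticmaps.nc")  # "/project/data/input/staticmaps.nc"

# an output entry given as path = "results.nc" resolves to
normpath(toml_dir, "data/output", "results.nc")    # "/project/data/output/results.nc"

# without dir_input / dir_output the directory defaults to "." and nothing changes
normpath(toml_dir, ".", "staticmaps.nc")           # "/project/staticmaps.nc"
```
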
20 changes: 11 additions & 9 deletions docs/src/config.md
@@ -2,7 +2,7 @@

[TOML](https://github.com/toml-lang/toml) is used as the configuration format for models
available in wflow. File paths included in the configuration TOML file are relative to the
TOML file location.
TOML file location, or to `dir_input` and `dir_output` if they are given.

## General time info
Time information is optional. When left out, each time step in the forcing NetCDF will be
@@ -21,6 +21,8 @@ endtime = 2000-02-01T00:00:00 # optional, default from forcing
starttime = 2000-01-01T00:00:00 # optional, default from forcing NetCDF
time_units = "days since 1900-01-01 00:00:00" # optional, this is default value
timestepsecs = 86400 # optional, default from forcing NetCDF
dir_input = "data/input" # optional, default is the directory of the TOML file
dir_output = "data/output" # optional, default is the directory of the TOML file
```

## Model section
@@ -53,8 +55,8 @@ the entire `state` section can be left out.

```toml
[state]
path_input = "data/instates-moselle.nc"
path_output = "data/outstates-moselle.nc"
path_input = "instates-moselle.nc"
path_output = "outstates-moselle.nc"

[state.vertical]
satwaterdepth = "satwaterdepth"
@@ -95,9 +97,9 @@ mapped a default value will be used if available.

```toml
[input]
# use "data/forcing-year-*.nc" if forcing files are split in time
path_forcing = "data/forcing-moselle.nc"
path_static = "data/staticmaps-moselle.nc"
# use "forcing-year-*.nc" if forcing files are split in time
path_forcing = "forcing-moselle.nc"
path_static = "staticmaps-moselle.nc"

# these are not directly part of the model
gauges = "wflow_gauges"
@@ -180,7 +182,7 @@ netCDF variables.

```toml
[output]
path = "data/output_moselle.nc"
path = "output_moselle.nc"

[output.vertical]
satwaterdepth = "satwaterdepth"
@@ -220,7 +222,7 @@ Delft-FEWS can ingest this data format directly.

```toml
[netcdf]
path = "data/output_scalar_moselle.nc"
path = "output_scalar_moselle.nc"

[[netcdf.variable]]
name = "Q"
@@ -268,7 +270,7 @@ list. You may specify as many entries as you wish.

```toml
[csv]
path = "data/output_moselle.csv"
path = "output_moselle.csv"

[[csv.column]]
header = "Q"
6 changes: 2 additions & 4 deletions src/hbv_model.jl
@@ -6,8 +6,7 @@ Config object. Will return a Model that is ready to run.
"""
function initialize_hbv_model(config::Config)
# unpack the paths to the NetCDF files
tomldir = dirname(config)
static_path = joinpath(tomldir, config.input.path_static)
static_path = input_path(config, config.input.path_static)

reader = prepare_reader(config)
clock = Clock(config, reader)
@@ -357,7 +356,6 @@ function initialize_hbv_model(config::Config)
if do_lakes
lakes, lakeindex, lake, pits = initialize_natural_lake(
config,
static_path,
nc,
inds_riv,
nriv,
@@ -560,7 +558,7 @@ function initialize_hbv_model(config::Config)

# read and set states in model object if reinit=false
if reinit == false
instate_path = joinpath(tomldir, config.state.path_input)
instate_path = input_path(config, config.state.path_input)
state_ncnames = ncnames(config.state)
set_states(instate_path, model, state_ncnames; type = Float)
# update kinematic wave volume for river and land domain
43 changes: 30 additions & 13 deletions src/io.jl
@@ -81,6 +81,23 @@ Base.dirname(config::Config) = dirname(pathof(config))
Base.iterate(config::Config) = iterate(Dict(config))
Base.iterate(config::Config, state) = iterate(Dict(config), state)

function combined_path(config::Config, dir::AbstractString, path::AbstractString)
tomldir = dirname(config)
return normpath(tomldir, dir, path)
end

"Construct a path relative to both the TOML directory and the optional `dir_input`"
function input_path(config::Config, path::AbstractString)
dir = get(config, "dir_input", ".")
return combined_path(config, dir, path)
end

"Construct a path relative to both the TOML directory and the optional `dir_output`"
function output_path(config::Config, path::AbstractString)
dir = get(config, "dir_output", ".")
return combined_path(config, dir, path)
end

"Extract NetCDF variable name `ncname` from `var` (type `String` or `Config`). If `var` has
type `Config`, either `scale` and `offset` are expected (with `ncname`) or a `value` (uniform
value), these are stored as part of `NamedTuple` `modifier`"
@@ -336,13 +353,13 @@ end

"prepare an output dataset for scalar data"
function setup_scalar_netcdf(
output_path,
path,
ncvars,
calendar,
time_units,
float_type = Float32,
)
ds = create_tracked_netcdf(output_path)
ds = create_tracked_netcdf(path)
defDim(ds, "time", Inf) # unlimited
defVar(
ds,
@@ -374,7 +391,7 @@ end

"prepare an output dataset for grid data"
function setup_grid_netcdf(
output_path,
path,
ncx,
ncy,
parameters,
@@ -385,7 +402,7 @@ function setup_grid_netcdf(
float_type = Float32,
)

ds = create_tracked_netcdf(output_path)
ds = create_tracked_netcdf(path)
defDim(ds, "time", Inf) # unlimited
if sizeinmetres
defVar(
@@ -537,18 +554,19 @@ struct Writer
end

function prepare_reader(config)
tomldir = dirname(config)
path_forcing = config.input.path_forcing
cyclic_path = joinpath(tomldir, config.input.path_static)
cyclic_path = input_path(config, config.input.path_static)

# absolute paths are not supported, see Glob.jl#2
if isabspath(path_forcing)
parts = splitpath(path_forcing)
# use the root/drive as the dir, to support * in directory names as well
glob_dir = parts[1]
glob_path = joinpath(parts[2:end])
glob_path = normpath(parts[2:end])
else
glob_dir = tomldir
tomldir = dirname(config)
dir_input = get(config, "dir_input", ".")
glob_dir = normpath(tomldir, dir_input)
glob_path = path_forcing
end

@@ -773,7 +791,6 @@ function prepare_writer(
nc_static;
maxlayers = nothing,
)
tomldir = dirname(config)
sizeinmetres = get(config.model, "sizeinmetres", false)::Bool

calendar = get(config, "calendar", "standard")::String
@@ -782,7 +799,7 @@
# create an output NetCDF that will hold all timesteps of selected parameters for grid
# data but only if config.output.path has been set
if haskey(config, "output") && haskey(config.output, "path")
nc_path = joinpath(tomldir, config.output.path)
nc_path = output_path(config, config.output.path)
# create a flat mapping from internal parameter locations to NetCDF variable names
output_ncnames = ncnames(config.output)
# fill the output_map by mapping parameter NetCDF names to arrays
@@ -808,7 +825,7 @@
if haskey(config, "state") && haskey(config.state, "path_output")
state_ncnames = ncnames(config.state)
state_map = out_map(state_ncnames, modelmap)
nc_state_path = joinpath(tomldir, config.state.path_output)
nc_state_path = output_path(config, config.state.path_output)
ds_outstate = setup_grid_netcdf(
nc_state_path,
x_nc,
@@ -829,7 +846,7 @@
# create an output NetCDF that will hold all timesteps of selected parameters for scalar
# data, but only if config.netcdf.variable has been set.
if haskey(config, "netcdf") && haskey(config.netcdf, "variable")
nc_scalar_path = joinpath(tomldir, config.netcdf.path)
nc_scalar_path = output_path(config, config.netcdf.path)
# get NetCDF info for scalar data (variable name, locationset (dim) and
# location ids)
ncvars_dims = nc_variables_dims(config.netcdf.variable, nc_static, config)
@@ -851,7 +868,7 @@

if haskey(config, "csv") && haskey(config.csv, "column")
# open CSV file and write header
csv_path = joinpath(tomldir, config.csv.path)
csv_path = output_path(config, config.csv.path)
# create directory if needed
mkpath(dirname(csv_path))
csv_io = open(csv_path, "w")
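
A short usage sketch of the new helpers, assuming a `Config` loaded from a hypothetical
`/project/config.toml`; when `dir_input` or `dir_output` is not set, the helpers fall back
to the TOML directory:

```julia
using Wflow

config = Wflow.Config("/project/config.toml")  # hypothetical TOML path

# input files resolve against the TOML directory combined with dir_input
Wflow.input_path(config, config.input.path_static)
# "/project/data/input/staticmaps-moselle.nc"  with dir_input = "data/input"
# "/project/staticmaps-moselle.nc"             without dir_input (defaults to ".")

# output files resolve against the TOML directory combined with dir_output
Wflow.output_path(config, config.csv.path)
# "/project/data/output/output_moselle.csv"    with dir_output = "data/output"
```

Note that `path_forcing` is handled separately in `prepare_reader`: the TOML directory
combined with `dir_input` becomes the base directory for the Glob pattern, since absolute
paths are not supported by the glob call.
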
5 changes: 4 additions & 1 deletion src/reservoir_lake.jl
@@ -211,7 +211,7 @@ end
end
end

function initialize_natural_lake(config, path, nc, inds_riv, nriv, pits, Δt)
function initialize_natural_lake(config, nc, inds_riv, nriv, pits, Δt)
# read only lake data if lakes true
# allow lakes only in river cells
# note that these locations are only the lake outlet pixels
@@ -318,6 +318,9 @@ function initialize_natural_lake(config, path, nc, inds_riv, nriv, pits, Δt)
sh = Vector{Union{SH,Missing}}(missing, n_lakes)
hq = Vector{Union{HQ,Missing}}(missing, n_lakes)
lowerlake_ind = fill(0, n_lakes)
# lake CSV parameter files are expected in the same directory as path_static
path = dirname(input_path(config, config.input.path_static))

for i = 1:n_lakes
lakeloc = lakelocs[i]
if linked_lakelocs[i] > 0
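
With the explicit `path` argument dropped from `initialize_natural_lake`, the directory
holding the lake CSV parameter files is now derived inside the function from `path_static`.
A small sketch of that resolution (directory and file names are hypothetical):

```julia
# with dir_input = "data/input" and path_static = "staticmaps-moselle.nc",
# the per-lake CSV files are expected next to the static maps NetCDF
path = dirname(input_path(config, config.input.path_static))
# "/project/data/input"

# a lake parameter file would then be read from that directory, e.g.
# (file name purely illustrative)
csv_path = joinpath(path, "lake_parameters.csv")
```
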
6 changes: 2 additions & 4 deletions src/sbm_gwf_model.jl
@@ -16,8 +16,7 @@ Will return a Model that is ready to run.
function initialize_sbm_gwf_model(config::Config)

# unpack the paths to the NetCDF files
tomldir = dirname(config)
static_path = joinpath(tomldir, config.input.path_static)
static_path = input_path(config, config.input.path_static)

reader = prepare_reader(config)
clock = Clock(config, reader)
@@ -80,7 +79,6 @@ function initialize_sbm_gwf_model(config::Config)
if do_lakes
lakes, lakeindex, lake, pits = initialize_natural_lake(
config,
dirname(static_path),
nc,
inds_riv,
nriv,
@@ -443,7 +441,7 @@ function initialize_sbm_gwf_model(config::Config)

# read and set states in model object if reinit=false
if reinit == false
instate_path = joinpath(tomldir, config.state.path_input)
instate_path = input_path(config, config.state.path_input)
state_ncnames = ncnames(config.state)
set_states(instate_path, model, state_ncnames, type = Float)
# update kinematic wave volume for river and land domain
6 changes: 2 additions & 4 deletions src/sbm_model.jl
@@ -7,8 +7,7 @@ Config object. Will return a Model that is ready to run.
function initialize_sbm_model(config::Config)

# unpack the paths to the NetCDF files
tomldir = dirname(config)
static_path = joinpath(tomldir, config.input.path_static)
static_path = input_path(config, config.input.path_static)

reader = prepare_reader(config)
clock = Clock(config, reader)
@@ -68,7 +67,6 @@ function initialize_sbm_model(config::Config)
if do_lakes
lakes, lakeindex, lake, pits = initialize_natural_lake(
config,
dirname(static_path),
nc,
inds_riv,
nriv,
@@ -332,7 +330,7 @@ function initialize_sbm_model(config::Config)

# read and set states in model object if reinit=false
if reinit == false
instate_path = joinpath(tomldir, config.state.path_input)
instate_path = input_path(config, config.state.path_input)
state_ncnames = ncnames(config.state)
set_states(instate_path, model, state_ncnames; type = Float)
@unpack lateral, vertical = model
5 changes: 2 additions & 3 deletions src/sediment_model.jl
@@ -7,8 +7,7 @@ Config object. Will return a Model that is ready to run.
function initialize_sediment_model(config::Config)

# unpack the paths to the NetCDF files
tomldir = dirname(config)
static_path = joinpath(tomldir, config.input.path_static)
static_path = input_path(config, config.input.path_static)

reader = prepare_reader(config)
clock = Clock(config, reader)
@@ -143,7 +142,7 @@ function initialize_sediment_model(config::Config)

# read and set states in model object if reinit=false
if reinit == false
instate_path = joinpath(tomldir, config.state.path_input)
instate_path = input_path(config, config.state.path_input)
state_ncnames = ncnames(config.state)
set_states(instate_path, model, state_ncnames; type = Float)
end
12 changes: 6 additions & 6 deletions test/hbv_config.toml
@@ -10,8 +10,8 @@ time_units = "days since 1900-01-01 00:00:00"
timestepsecs = 86400

[state]
path_input = "data/instates-lahn.nc"
path_output = "data/outstates-lahn.nc"
path_input = "data/input/instates-lahn.nc"
path_output = "data/output/outstates-lahn.nc"

# if listed, the variable must be present in the NetCDF or error
# if not listed, the variable can get a default value if it has one
@@ -33,8 +33,8 @@ h = "h_land"
q = "q_land"

[input]
path_forcing = "data/forcing-lahn.nc"
path_static = "data/staticmaps-lahn.nc"
path_forcing = "data/input/forcing-lahn.nc"
path_static = "data/input/staticmaps-lahn.nc"

# these are not directly part of the model
gauges = "wflow_gauges"
@@ -93,7 +93,7 @@ snow = true
type = "hbv"

[output]
path = "data/output_lahn.nc"
path = "data/output/output_lahn.nc"

[output.vertical]
lowerzonestorage = "lowerzonestorage"
@@ -106,7 +106,7 @@ upperzonestorage = "upperzonestorage"
q = "q"

[csv]
path = "data/output_lahn.csv"
path = "data/output/output_lahn.csv"

[[csv.column]]
header = "Q"