Commit
Merge 6b133cb into 9c8d8f5
pat-schmitt committed Apr 7, 2023
2 parents 9c8d8f5 + 6b133cb commit 7dcd6e0
Showing 5 changed files with 156 additions and 47 deletions.
18 changes: 3 additions & 15 deletions oggm/core/dynamic_spinup.py
@@ -41,7 +41,7 @@ def run_dynamic_spinup(gdir, init_model_filesuffix=None, init_model_yr=None,
store_model_geometry=True, store_fl_diagnostics=None,
store_model_evolution=True, ignore_errors=False,
return_t_bias_best=False, ye=None,
-model_flowline_filesuffix='', make_compatible=False,
+model_flowline_filesuffix='',
add_fixed_geometry_spinup=False, **kwargs):
"""Dynamically spinup the glacier to match area or volume at the RGI date.
@@ -180,14 +180,6 @@ def run_dynamic_spinup(gdir, init_model_filesuffix=None, init_model_yr=None,
suffix to the model_flowlines filename to use (if no other flowlines
are provided with init_model_filesuffix or init_model_fls).
Default is ''
-make_compatible : bool
-    if set to true this will add all variables to the resulting dataset
-    so it could be combined with any other one. This is necessary if
-    different spinup methods are used. For example if using the dynamic
-    spinup and setting fixed geometry spinup as fallback, the variable
-    'is_fixed_geometry_spinup' must be added to the dynamic spinup so
-    it is possible to compile both glaciers together.
-    Default is False
add_fixed_geometry_spinup : bool
If True and the original spinup_period must be shortened (due to
ice-free or out-of-boundary error) a fixed geometry spinup is added at
@@ -326,8 +318,7 @@ def save_model_without_dynamic_spinup():
yr_run,
geom_path=geom_path,
diag_path=diag_path,
-fl_diag_path=fl_diag_path,
-make_compatible=make_compatible)
+fl_diag_path=fl_diag_path)

return model_dynamic_spinup_end

@@ -425,8 +416,7 @@ def run_model_with_spinup_to_rgi_date(t_bias):
diag_path=diag_path,
fl_diag_path=fl_diag_path,
dynamic_spinup_min_ice_thick=min_ice_thickness,
-fixed_geometry_spinup_yr=fixed_geometry_spinup_yr,
-make_compatible=make_compatible)
+fixed_geometry_spinup_yr=fixed_geometry_spinup_yr)

# now we delete the min_h variable again if it was not
# included before (inplace)
@@ -1186,7 +1176,6 @@ def dynamic_melt_f_run_with_dynamic_spinup(
store_model_geometry=store_model_geometry,
store_fl_diagnostics=store_fl_diagnostics,
model_flowline_filesuffix=model_flowline_filesuffix,
-make_compatible=True,
add_fixed_geometry_spinup=add_fixed_geometry_spinup,
**kwargs)
# save the temperature bias which was successful in the last iteration
@@ -1412,7 +1401,6 @@ def dynamic_melt_f_run_with_dynamic_spinup_fallback(
store_fl_diagnostics=store_fl_diagnostics,
ignore_errors=False,
ye=ye,
-make_compatible=True,
add_fixed_geometry_spinup=add_fixed_geometry_spinup,
**kwargs)

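A minimal caller-side sketch of the behavior after this change (the RGI id, filesuffix and prepro settings are placeholders, not from this commit): dynamic spinup runs no longer pass make_compatible, because compile_run_output now aligns differing output variables itself.

# Hedged sketch, assuming an initialized OGGM environment
from oggm import cfg, utils, workflow, tasks

cfg.initialize()
cfg.PATHS['working_dir'] = utils.gettempdir('oggm_spinup_demo')
gdirs = workflow.init_glacier_directories(['RGI60-11.00897'],
                                          from_prepro_level=4,
                                          prepro_border=80)
workflow.execute_entity_task(tasks.run_dynamic_spinup, gdirs,
                             output_filesuffix='_dyn_spinup')
# No make_compatible keyword needed anymore: missing variables are
# back-filled with NaN when compiling
ds = utils.compile_run_output(gdirs, input_filesuffix='_dyn_spinup')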
14 changes: 0 additions & 14 deletions oggm/core/flowline.py
@@ -933,7 +933,6 @@ def run_until_and_store(self, y1,
stop_criterion=None,
fixed_geometry_spinup_yr=None,
dynamic_spinup_min_ice_thick=None,
-make_compatible=False
):
"""Runs the model and returns intermediate steps in xarray datasets.
@@ -992,13 +991,6 @@ def run_until_and_store(self, y1,
area or the total volume. This is useful to smooth out yearly
fluctuations when matching to observations. The names of these new
variables include the suffix _min_h (e.g. 'area_m2_min_h')
-make_compatible : bool
-    if set to true this will add all variables to the resulting dataset
-    so it could be combined with any other one. This is necessary if
-    different spinup methods are used. For example if using the dynamic
-    spinup and setting fixed geometry spinup as fallback, the variable
-    'is_fixed_geometry_spinup' must be added to the dynamic spinup so
-    it is possible to compile both glaciers together.
Returns
-------
geom_ds : xarray.Dataset or None
@@ -1161,12 +1153,6 @@ def run_until_and_store(self, y1,
desc = 'Part of the series which are spinup'
diag_ds['is_fixed_geometry_spinup'].attrs['description'] = desc
diag_ds['is_fixed_geometry_spinup'].attrs['unit'] = '-'
-elif make_compatible:
-    is_spinup_time = np.full(len(monthly_time), False, dtype=bool)
-    diag_ds['is_fixed_geometry_spinup'] = ('time', is_spinup_time)
-    desc = 'Part of the series which are spinup'
-    diag_ds['is_fixed_geometry_spinup'].attrs['description'] = desc
-    diag_ds['is_fixed_geometry_spinup'].attrs['unit'] = '-'

fl_diag_dss = None
if do_fl_diag:
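For reference, the deleted make_compatible branch only padded the diagnostics dataset with an all-False spinup flag so that outputs from different spinup methods shared the same variables. A self-contained sketch of that padding (plain numpy/xarray, not the OGGM API):

import numpy as np
import xarray as xr

# Stand-in monthly time axis for a model run from 2000 to 2002
monthly_time = np.arange(2000, 2002, 1 / 12)
diag_ds = xr.Dataset(coords={'time': monthly_time})

# What the removed branch stored: a flag that is False everywhere
is_spinup_time = np.full(len(monthly_time), False, dtype=bool)
diag_ds['is_fixed_geometry_spinup'] = ('time', is_spinup_time)
desc = 'Part of the series which are spinup'
diag_ds['is_fixed_geometry_spinup'].attrs['description'] = desc
diag_ds['is_fixed_geometry_spinup'].attrs['unit'] = '-'

Since compile_run_output now back-fills missing variables with NaN (see oggm/utils/_workflow.py below), this padding on the model side became redundant.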
8 changes: 4 additions & 4 deletions oggm/params.cfg
@@ -401,16 +401,16 @@ store_model_geometry = False

# What variables would you like to store in the diagnostics files
# Currently available in standard files:
-# volume, volume_bsl, volume_bwl, area, length, calving, calving_rate,
-# terminus_thick_i (with i in 0..9).
+# volume, volume_bsl, volume_bwl, area, area_min_h, length, calving,
+# calving_rate, terminus_thick_i (with i in 0..9).
# And with additional hydro output:
# off_area, on_area,
# melt_off_glacier, melt_on_glacier,
# liq_prcp_off_glacier, liq_prcp_on_glacier, snowfall_off_glacier, snowfall_on_glacier,
# Probably useful for debugging only
# melt_residual_off_glacier, melt_residual_on_glacier
# model_mb, residual_mb, snow_bucket,
-# You need to kee all variables in one line unfortunately
+# You need to keep all variables in one line unfortunately
store_diagnostic_variables = volume, volume_bsl, volume_bwl, area, length, calving, calving_rate, off_area, on_area, melt_off_glacier, melt_on_glacier, liq_prcp_off_glacier, liq_prcp_on_glacier, snowfall_off_glacier, snowfall_on_glacier

# Whether to store the model flowline diagnostic files during operational runs
@@ -419,5 +419,5 @@ store_diagnostic_variables = volume, volume_bsl, volume_bwl, area, length, calv
store_fl_diagnostics = False
# What variables would you like to store in the flowline diagnostics files
# Note: area and volume are mandatory
-# You need to kee all variables in one line unfortunately
+# You need to keep all variables in one line unfortunately
store_fl_diagnostic_variables = area, thickness, volume, volume_bsl, volume_bwl, calving_bucket, ice_velocity
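The same settings can also be applied from Python; a short sketch (cfg.PARAMS holds these entries as plain Python lists at runtime, which the test below relies on when calling .remove()):

from oggm import cfg

cfg.initialize()
# Same meaning as the params.cfg lines above, set programmatically
cfg.PARAMS['store_diagnostic_variables'] = [
    'volume', 'volume_bsl', 'volume_bwl', 'area', 'area_min_h',
    'length', 'calving', 'calving_rate',
]
cfg.PARAMS['store_fl_diagnostics'] = True
# area and volume are mandatory here
cfg.PARAMS['store_fl_diagnostic_variables'] = ['area', 'thickness', 'volume']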
74 changes: 74 additions & 0 deletions oggm/tests/test_utils.py
@@ -27,6 +27,7 @@
from oggm.exceptions import (InvalidParamsError, InvalidDEMError,
InvalidWorkflowError,
DownloadVerificationFailedException)
from oggm.core import flowline


pytestmark = pytest.mark.test_env("utils")
@@ -477,6 +478,79 @@ def test_ref_data_manager(self):
assert type(cfg.DATA) is multiprocessing.managers.DictProxy


class TestWorkflowUtils:
def test_compile_run_output(self, hef_gdir, hef_copy_gdir):

# Here I test if compile_run_output works with different data variables
gdirs = [hef_gdir, hef_copy_gdir]
filesuffix = '_compile_run_output_test'
cfg.PARAMS['store_model_geometry'] = True

# all allowed data variables, for testing in compile_run_output
allowed_data_vars = ['volume', 'volume_bsl', 'volume_bwl', 'area',
'area_min_h', 'length', 'calving', 'calving_rate',
'off_area', 'on_area',
'melt_off_glacier', 'melt_on_glacier',
'liq_prcp_off_glacier', 'liq_prcp_on_glacier',
'snowfall_off_glacier', 'snowfall_on_glacier',
'melt_residual_off_glacier',
'melt_residual_on_glacier', 'model_mb',
'residual_mb', 'snow_bucket']
for gi in range(10):
allowed_data_vars += [f'terminus_thick_{gi}']

# we run the first gdir excluding the variables 'area_min_h' (2d) and
# 'melt_on_glacier' (3d, with store_monthly_hydro=True)
def remove_diag_var(variable):
try:
cfg.PARAMS['store_diagnostic_variables'].remove(variable)
except ValueError as err:
if str(err) == 'list.remove(x): x not in list':
pass
else:
raise
remove_diag_var('area_min_h')
remove_diag_var('melt_on_glacier')
flowline.run_with_hydro(gdirs[0],
run_task=flowline.run_constant_climate,
store_monthly_hydro=True,
y0=1980, halfsize=1, nyears=2,
output_filesuffix=filesuffix)

# the second gdir run includes all allowed data variables
cfg.PARAMS['store_diagnostic_variables'] = allowed_data_vars
flowline.run_with_hydro(gdirs[1],
run_task=flowline.run_constant_climate,
store_monthly_hydro=True,
y0=1980, halfsize=1, nyears=2,
output_filesuffix=filesuffix)

# We need to test two things:
# First: if there is a variable in the first diagnostics file which is
# not present in the second one, no KeyError should be raised.
ds_1 = utils.compile_run_output([gdirs[0], gdirs[1]],
input_filesuffix=filesuffix)

def check_result(ds):
assert 'area_m2_min_h' in ds.data_vars
assert 'melt_on_glacier' in ds.data_vars
assert 'melt_on_glacier_monthly' in ds.data_vars
assert np.all(np.isnan(
ds.loc[{'rgi_id': gdirs[0].rgi_id}]['area_m2_min_h'].values))
assert np.all(np.isnan(
ds.loc[{'rgi_id': gdirs[0].rgi_id}]['melt_on_glacier'].values))
assert np.all(np.isnan(
ds.loc[{'rgi_id': gdirs[0].rgi_id}]['melt_on_glacier_monthly'].values))

check_result(ds_1)

# Second: if there is a variable in the second diagnostics file which is
# not present in the first one, it should still be included in the result.
ds_2 = utils.compile_run_output([gdirs[1], gdirs[0]],
input_filesuffix=filesuffix)
check_result(ds_2)


class TestStartFromTar(unittest.TestCase):

def setUp(self):
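The semantics the test asserts can be illustrated standalone: in the compiled dataset, a variable stored for only some glaciers is still present, with all-NaN columns for the glaciers whose files lack it. A hedged sketch with synthetic data (the RGI ids are placeholders):

import numpy as np
import xarray as xr

# Two glaciers; only the second one stored 'melt_on_glacier'
data = np.full((3, 2), np.nan)
data[:, 1] = [1.0e6, 1.1e6, 0.9e6]
ds = xr.Dataset(
    {'melt_on_glacier': (('time', 'rgi_id'), data)},
    coords={'time': [2000, 2001, 2002],
            'rgi_id': ['RGI60-11.00897', 'RGI60-11.00898']})

# Which glaciers actually provided the variable?
has_var = ~np.isnan(ds['melt_on_glacier']).all(dim='time')
print(has_var.to_series())  # False for the first glacier, True for the second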
89 changes: 75 additions & 14 deletions oggm/utils/_workflow.py
@@ -1101,9 +1101,31 @@ def compile_run_output(gdirs, path=True, input_filesuffix='',
# Get the dimensions of all this
rgi_ids = [gd.rgi_id for gd in gdirs]

-# To find the longest time, we have to open all files unfortunately
+# To find the longest time, we have to open all files unfortunately; we
+# also create a list of all data variables (in case not all files contain
+# the same data variables), and finally we decide on the name of "3d"
+# variables in case we have daily output
time_info = {}
time_keys = ['hydro_year', 'hydro_month', 'calendar_year', 'calendar_month']
allowed_data_vars = ['volume_m3', 'volume_bsl_m3', 'volume_bwl_m3',
'area_m2', 'area_m2_min_h', 'length_m', 'calving_m3',
'calving_rate_myr', 'off_area',
'on_area', 'model_mb', 'is_fixed_geometry_spinup']
for gi in range(10):
allowed_data_vars += [f'terminus_thick_{gi}']
# these hydro variables can be _monthly or _daily
hydro_vars = ['melt_off_glacier', 'melt_on_glacier',
'liq_prcp_off_glacier', 'liq_prcp_on_glacier',
'snowfall_off_glacier', 'snowfall_on_glacier',
'melt_residual_off_glacier', 'melt_residual_on_glacier',
'snow_bucket', 'residual_mb']
for v in hydro_vars:
allowed_data_vars += [v]
allowed_data_vars += [v + '_monthly']
allowed_data_vars += [v + '_daily']
data_vars = {}
name_2d_dim = 'month_2d'
contains_3d_data = False
for gd in gdirs:
fp = gd.get_filepath('model_diagnostics', filesuffix=input_filesuffix)
try:
@@ -1132,6 +1154,41 @@
time_info[cn] = np.append(ds.variables[cn][:p],
time_info[cn])

# check if there are new data variables and add them
for vn in ds.variables:
# exclude time variables
if vn in ['month_2d', 'calendar_month_2d',
'hydro_month_2d']:
name_2d_dim = 'month_2d'
contains_3d_data = True
elif vn in ['day_2d', 'calendar_day_2d', 'hydro_day_2d']:
name_2d_dim = 'day_2d'
contains_3d_data = True
elif vn in allowed_data_vars:
# check if data variable is new
if vn not in data_vars.keys():
data_vars[vn] = dict()
data_vars[vn]['dims'] = ds.variables[vn].dimensions
data_vars[vn]['attrs'] = dict()
for attr in ds.variables[vn].ncattrs():
if attr not in ['_FillValue', 'coordinates',
'dtype']:
data_vars[vn]['attrs'][attr] = getattr(
ds.variables[vn], attr)
elif vn not in ['time'] + time_keys:
# This check has future developments in mind.
# If you end up here, it means the current data variable is
# not in allowed_data_vars OR not among the defined time
# dimensions. If it is a new data variable, add it to
# allowed_data_vars above (also add it to
# test_compile_run_output). If it is a new dimension,
# handle it in the if/elif statements.
raise InvalidParamsError(f'The data variable "{vn}" '
'is not known. Is it new or '
'is it a new dimension? '
'Check comment above this '
'raise for more info!')

# If this worked, keep it as template
ppath = fp
except FileNotFoundError:
@@ -1165,19 +1222,15 @@ def compile_run_output(gdirs, path=True, input_filesuffix='',
ds.coords[cn] = ('time', time_info[cn])
ds[cn].attrs['description'] = ds_diag[cn].attrs['description']

-# We decide on the name of "3d" variables in case we have daily
-# output
-name_2d_dim = 'day_2d' if 'day_2d' in ds_diag.dims else 'month_2d'

# Prepare the 2D variables
shape = (len(time), len(rgi_ids))
out_2d = dict()
-for vn in ds_diag.data_vars:
-    if name_2d_dim in ds_diag[vn].dims:
+for vn in data_vars:
+    if name_2d_dim in data_vars[vn]['dims']:
continue
var = dict()
var['data'] = np.full(shape, np.nan)
-var['attrs'] = ds_diag[vn].attrs
+var['attrs'] = data_vars[vn]['attrs']
out_2d[vn] = var

# 1D Variables
@@ -1196,20 +1249,20 @@ def compile_run_output(gdirs, path=True, input_filesuffix='',

# Maybe 3D?
out_3d = dict()
-if name_2d_dim in ds_diag.dims:
+if contains_3d_data:
# We have some 3d vars
month_2d = ds_diag[name_2d_dim]
ds.coords[name_2d_dim] = (name_2d_dim, month_2d.data)
cn = f'calendar_{name_2d_dim}'
ds.coords[cn] = (name_2d_dim, ds_diag[cn].values)

shape = (len(time), len(month_2d), len(rgi_ids))
-for vn in ds_diag.data_vars:
-    if name_2d_dim not in ds_diag[vn].dims:
+for vn in data_vars:
+    if name_2d_dim not in data_vars[vn]['dims']:
continue
var = dict()
var['data'] = np.full(shape, np.nan)
-var['attrs'] = ds_diag[vn].attrs
+var['attrs'] = data_vars[vn]['attrs']
out_3d[vn] = var

# Read out
Expand All @@ -1222,9 +1275,17 @@ def compile_run_output(gdirs, path=True, input_filesuffix='',
a = np.nonzero(time == it[0])[0][0]
b = np.nonzero(time == it[-1])[0][0] + 1
for vn, var in out_2d.items():
-    var['data'][a:b, i] = ds_diag.variables[vn][:]
+    # try statement in case some data variables are not in all files
+    try:
+        var['data'][a:b, i] = ds_diag.variables[vn][:]
+    except KeyError:
+        pass
for vn, var in out_3d.items():
-    var['data'][a:b, :, i] = ds_diag.variables[vn][:]
+    # try statement in case some data variables are not in all files
+    try:
+        var['data'][a:b, :, i] = ds_diag.variables[vn][:]
+    except KeyError:
+        pass
for vn, var in out_1d.items():
var['data'][i] = ds_diag.getncattr(vn)
except FileNotFoundError:
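In essence, compile_run_output now performs a union-merge: it collects the union of variables (and their attributes) across all diagnostics files and back-fills per-glacier gaps with NaN instead of raising a KeyError. A minimal standalone sketch of that merge logic (not the OGGM implementation):

import numpy as np

def merge_union(records, n_time):
    """Union-merge per-glacier series; variables missing from a glacier
    stay NaN in its column, mirroring the try/except KeyError above.

    records: one dict per glacier mapping variable name -> 1d array.
    """
    all_vars = sorted(set().union(*(set(r) for r in records)))
    out = {vn: np.full((n_time, len(records)), np.nan) for vn in all_vars}
    for i, rec in enumerate(records):
        for vn, series in rec.items():
            out[vn][:len(series), i] = series
    return out

# Glacier 0 lacks 'melt_on_glacier'; its column stays NaN after the merge
merged = merge_union(
    [{'volume_m3': np.ones(3)},
     {'volume_m3': np.ones(3), 'melt_on_glacier': np.zeros(3)}],
    n_time=3)
assert np.all(np.isnan(merged['melt_on_glacier'][:, 0]))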
