Skip to content

Commit

Permalink
Merge ec74cf2 into b1e1623
Browse files Browse the repository at this point in the history
  • Loading branch information
fmaussion committed Feb 28, 2023
2 parents b1e1623 + ec74cf2 commit fcd8308
Show file tree
Hide file tree
Showing 34 changed files with 2,573 additions and 5,528 deletions.
2 changes: 0 additions & 2 deletions docs/_code/prepare_climate.py
Original file line number Diff line number Diff line change
Expand Up @@ -37,8 +37,6 @@
cfg.PARAMS['baseline_climate'] = 'HISTALP'
cfg.PARAMS['use_tstar_calibration'] = True
cfg.PARAMS['use_winter_prcp_factor'] = False
cfg.PARAMS['hydro_month_nh'] = 10
cfg.PARAMS['hydro_month_sh'] = 4
cfg.PARAMS['prcp_scaling_factor'] = 2.5
tasks.process_histalp_data(gdir)
with warnings.catch_warnings():
Expand Down
5 changes: 1 addition & 4 deletions docs/api.rst
Original file line number Diff line number Diff line change
Expand Up @@ -30,7 +30,6 @@ Tools to set-up and run OGGM.
workflow.execute_entity_task
workflow.gis_prepro_tasks
workflow.download_ref_tstars
workflow.climate_tasks
workflow.inversion_tasks
workflow.merge_glacier_tasks
workflow.calibrate_inversion_from_consensus
Expand Down Expand Up @@ -135,7 +134,6 @@ the majority of OGGM's tasks). They are parallelizable.
tasks.process_climate_data
tasks.process_custom_climate_data
tasks.historical_delta_method
tasks.historical_climate_qc
tasks.local_t_star
tasks.mu_star_calibration
tasks.apparent_mb_from_linear_mb
Expand Down Expand Up @@ -164,7 +162,7 @@ the majority of OGGM's tasks). They are parallelizable.
tasks.run_from_climate_data
tasks.run_constant_climate
tasks.run_dynamic_spinup
tasks.run_dynamic_mu_star_calibration
tasks.run_dynamic_melt_f_calibration
tasks.copy_to_basedir
tasks.gdir_to_tar

Expand All @@ -182,7 +180,6 @@ but might use multiprocessing internally.
:nosignatures:

global_tasks.gis_prepro_tasks
global_tasks.climate_tasks
global_tasks.inversion_tasks
global_tasks.calibrate_inversion_from_consensus
global_tasks.match_regional_geodetic_mb
Expand Down
4 changes: 2 additions & 2 deletions docs/whats-new.rst
Original file line number Diff line number Diff line change
Expand Up @@ -30,7 +30,7 @@ Enhancements
By `Lilian Schuster <https://github.com/lilianschuster>`_
- added support for a precipitation factor varying per glacier (:pull:`1435`).
By `Lilian Schuster <https://github.com/lilianschuster>`_
- Added a new entity task ``run_dynamic_mu_star_calibration``. This task
- Added a new entity task ``run_dynamic_melt_f_calibration``. This task
dynamically calibrates the temperature sensitivity mu star to a geodetic
mass-balance observation. There are different options available for how this
done, the default incorporates an inversion and a dynamic spinup in each
Expand All @@ -44,7 +44,7 @@ Enhancements
able to globally define the used minimum ice thickness for the dynamic spinup
(:pull:`1425`).
By `Patrick Schmitt <https://github.com/pat-schmitt>`_
- Rearranged the entity tasks ``run_dynamic_mu_star_calibration`` and
- Rearanged the entity tasks ``run_dynamic_melt_f_calibration`` and
  ``run_dynamic_spinup`` with all help functions in new module
``oggm.core.dynamic_spinup`` (:pull:`1425`).
By `Patrick Schmitt <https://github.com/pat-schmitt>`_
Expand Down
75 changes: 23 additions & 52 deletions oggm/cfg.py
Original file line number Diff line number Diff line change
Expand Up @@ -114,8 +114,9 @@ class ParamsLoggingDict(ResettingOrderedDict):

def __setitem__(self, key, value):
# Overrides the original dic to log the change
self._check_input(key, value)
if self.do_log:
if key not in self:
raise InvalidParamsError(f'Parameter `{key}` not in PARAMS')
self._log_param_change(key, value)
ResettingOrderedDict.__setitem__(self, key, value)

Expand Down Expand Up @@ -160,16 +161,6 @@ def _log_param_change(self, key, value):
prev,
value))

def _check_input(self, key, value):

if key == 'hydro_month_sh' and value == 1:
nh = self.get('hydro_month_nh')
if nh is not None and nh != 1:
msg = ("When setting PARAMS['hydro_month_sh'] to 1, please set "
"PARAMS['hydro_month_nh'] to 1 first.")
raise InvalidWorkflowError(msg)


# Globals
IS_INITIALIZED = False
PARAMS = ParamsLoggingDict()
Expand Down Expand Up @@ -201,27 +192,27 @@ def _check_input(self, key, value):
' value 0 at unglaciated points.')
BASENAMES['glacier_mask'] = ('glacier_mask.tif', _doc)

_doc = ('The glacier outlines in the local map projection (Transverse '
'Mercator).')
_doc = ('The glacier outlines in the local map projection '
'(Transverse Mercator or UTM).')
BASENAMES['outlines'] = ('outlines.shp', _doc)

_doc = ('The glacier intersects in the local map projection (Transverse '
'Mercator).')
_doc = 'The glacier intersects in the local map projection.'
BASENAMES['intersects'] = ('intersects.shp', _doc)

_doc = ('Each flowline has a catchment area computed from flow routing '
'algorithms: this shapefile stores the catchment outlines (in the '
'local map projection (Transverse Mercator).')
'local map projection).')
BASENAMES['flowline_catchments'] = ('flowline_catchments.shp', _doc)

_doc = ('The intersections between catchments (shapefile) in the local map '
'projection (Transverse Mercator).')
'projection.')
BASENAMES['catchments_intersects'] = ('catchments_intersects.shp', _doc)

_doc = 'A ``salem.Grid`` handling the georeferencing of the local grid.'
BASENAMES['glacier_grid'] = ('glacier_grid.json', _doc)

_doc = 'A dictionary containing runtime diagnostics useful for debugging.'
_doc = ('A dictionary containing runtime diagnostics useful for debugging or '
'logging of run parameters.')
BASENAMES['diagnostics'] = ('diagnostics.json', _doc)

_doc = ('A netcdf file containing several gridded data variables such as '
Expand Down Expand Up @@ -254,7 +245,7 @@ def _check_input(self, key, value):
_doc = 'A list of :py:class:`oggm.Centerline` instances, sorted by flow order.'
BASENAMES['centerlines'] = ('centerlines.pkl', _doc)

_doc = ('A "better" version of the Centerlines, now on a regular spacing '
_doc = ('A "better" version of the centerlines, now on a regular spacing '
'i.e., not on the gridded (i, j) indices. The tails of the '
'tributaries are cut out to make more realistic junctions. '
'They are now "1.5D" i.e., with a width.')
Expand All @@ -263,26 +254,19 @@ def _check_input(self, key, value):
_doc = 'The historical monthly climate timeseries stored in a netCDF file.'
BASENAMES['climate_historical'] = ('climate_historical.nc', _doc)

# so far, this is only ERA5_daily and does not work with the default OGGM
# mass balance module
# so far, this is only ERA5 or E5E5 daily and does not work with the default
# OGGM mass balance module, only with sandbox
_doc = ('The historical daily climate timeseries stored in a netCDF file.'
'(only temperature is really changing on daily basis,'
'precipitation is just assumed constant for every day')
BASENAMES['climate_historical_daily'] = ('climate_historical_daily.nc', _doc)

_doc = 'Deprecated: old name for `climate_historical`.'
BASENAMES['climate_monthly'] = ('climate_monthly.nc', _doc)

_doc = ('Some information (dictionary) about the mass '
'balance parameters for this glacier.')
BASENAMES['climate_info'] = ('climate_info.json', _doc)
_doc = "A dict containing the glacier's mass balance calibration parameters."
BASENAMES['mb_calib'] = ('mb_calib.json', _doc)

_doc = 'The monthly GCM climate timeseries stored in a netCDF file.'
BASENAMES['gcm_data'] = ('gcm_data.nc', _doc)

_doc = "A dict containing the glacier's t*, bias, and the flowlines' mu*"
BASENAMES['local_mustar'] = ('local_mustar.json', _doc)

_doc = 'List of dicts containing the data needed for the inversion.'
BASENAMES['inversion_input'] = ('inversion_input.pkl', _doc)

Expand All @@ -296,11 +280,8 @@ def _check_input(self, key, value):
'the optimal ela_h and grad.')
BASENAMES['linear_mb_params'] = ('linear_mb_params.pkl', _doc)

_doc = 'Deprecated: renamed to `model_geometry`.'
BASENAMES['model_run'] = ('model_run.nc', _doc)

_doc = ('A netcdf file containing enough information to reconstruct the '
'entire flowline glacier geometry along the run (can be expensive'
'entire flowline glacier geometry along the run (can be expensive '
'in disk space).')
BASENAMES['model_geometry'] = ('model_geometry.nc', _doc)

Expand Down Expand Up @@ -529,14 +510,10 @@ def initialize_minimal(file=None, logging_level='INFO', params=None,
PARAMS['filter_min_slope'] = cp.as_bool('filter_min_slope')
PARAMS['downstream_line_shape'] = cp['downstream_line_shape']
PARAMS['auto_skip_task'] = cp.as_bool('auto_skip_task')
PARAMS['correct_for_neg_flux'] = cp.as_bool('correct_for_neg_flux')
PARAMS['filter_for_neg_flux'] = cp.as_bool('filter_for_neg_flux')
PARAMS['run_mb_calibration'] = cp.as_bool('run_mb_calibration')
PARAMS['rgi_version'] = cp['rgi_version']
PARAMS['use_rgi_area'] = cp.as_bool('use_rgi_area')
PARAMS['compress_climate_netcdf'] = cp.as_bool('compress_climate_netcdf')
PARAMS['use_tar_shapefiles'] = cp.as_bool('use_tar_shapefiles')
PARAMS['clip_mu_star'] = cp.as_bool('clip_mu_star')
PARAMS['clip_tidewater_border'] = cp.as_bool('clip_tidewater_border')
PARAMS['dl_verify'] = cp.as_bool('dl_verify')
PARAMS['calving_line_extension'] = cp.as_int('calving_line_extension')
Expand All @@ -550,13 +527,10 @@ def initialize_minimal(file=None, logging_level='INFO', params=None,
PARAMS['store_fl_diagnostics'] = cp.as_bool('store_fl_diagnostics')

# Climate
PARAMS['use_tstar_calibration'] = cp.as_bool('use_tstar_calibration')
PARAMS['baseline_climate'] = cp['baseline_climate'].strip().upper()
PARAMS['hydro_month_nh'] = cp.as_int('hydro_month_nh')
PARAMS['hydro_month_sh'] = cp.as_int('hydro_month_sh')
PARAMS['climate_qc_months'] = cp.as_int('climate_qc_months')
PARAMS['temp_use_local_gradient'] = cp.as_bool('temp_use_local_gradient')
PARAMS['tstar_search_glacierwide'] = cp.as_bool('tstar_search_glacierwide')
PARAMS['geodetic_mb_period'] = cp['geodetic_mb_period']
PARAMS['use_winter_prcp_factor'] = cp.as_bool('use_winter_prcp_factor')

Expand All @@ -566,11 +540,8 @@ def initialize_minimal(file=None, logging_level='INFO', params=None,
PARAMS[k] = [float(vk) for vk in cp.as_list(k)]
k = 'temp_local_gradient_bounds'
PARAMS[k] = [float(vk) for vk in cp.as_list(k)]
k = 'tstar_search_window'
PARAMS[k] = [int(vk) for vk in cp.as_list(k)]
k = 'ref_mb_valid_window'
PARAMS[k] = [int(vk) for vk in cp.as_list(k)]
PARAMS['use_bias_for_run'] = cp.as_bool('use_bias_for_run')
k = 'free_board_marine_terminating'
PARAMS[k] = [float(vk) for vk in cp.as_list(k)]
k = 'store_diagnostic_variables'
Expand Down Expand Up @@ -599,22 +570,22 @@ def initialize_minimal(file=None, logging_level='INFO', params=None,

# Delete non-floats
ltr = ['working_dir', 'dem_file', 'climate_file', 'use_tar_shapefiles',
'grid_dx_method', 'run_mb_calibration', 'compress_climate_netcdf',
'mp_processes', 'use_multiprocessing', 'climate_qc_months',
'grid_dx_method', 'compress_climate_netcdf',
'mp_processes', 'use_multiprocessing',
'temp_use_local_gradient', 'temp_local_gradient_bounds',
'topo_interp', 'use_compression', 'bed_shape', 'continue_on_error',
'use_multiple_flowlines', 'tstar_search_glacierwide', 'border',
'mpi_recv_buf_size', 'hydro_month_nh', 'clip_mu_star', 'map_proj',
'tstar_search_window', 'use_bias_for_run', 'hydro_month_sh',
'use_multiple_flowlines', 'border',
'mpi_recv_buf_size', 'map_proj',
'hydro_month_sh', 'hydro_month_nh',
'use_intersects', 'filter_min_slope', 'clip_tidewater_border',
'auto_skip_task', 'correct_for_neg_flux', 'filter_for_neg_flux',
'auto_skip_task','ref_mb_valid_window',
'rgi_version', 'dl_verify', 'use_mp_spawn', 'calving_use_limiter',
'use_shape_factor_for_inversion', 'use_rgi_area', 'use_tstar_calibration',
'use_shape_factor_for_inversion', 'use_rgi_area',
'use_shape_factor_for_fluxbasedmodel', 'baseline_climate',
'calving_line_extension', 'use_kcalving_for_run', 'lru_maxsize',
'free_board_marine_terminating', 'use_kcalving_for_inversion',
'error_when_glacier_reaches_boundaries', 'glacier_length_method',
'use_inversion_params_for_run', 'ref_mb_valid_window',
'use_inversion_params_for_run',
'tidewater_type', 'store_model_geometry', 'use_winter_prcp_factor',
'store_diagnostic_variables', 'store_fl_diagnostic_variables',
'geodetic_mb_period', 'store_fl_diagnostics', 'winter_prcp_factor_ab',
Expand Down
34 changes: 16 additions & 18 deletions oggm/cli/benchmark.py
Original file line number Diff line number Diff line change
Expand Up @@ -30,8 +30,9 @@ def _add_time_to_df(df, index, t):

def run_benchmark(rgi_version=None, rgi_reg=None, border=None,
output_folder='', working_dir='', is_test=False,
test_rgidf=None, test_intersects_file=None,
override_params=None, test_topofile=None):
logging_level='WORKFLOW', test_rgidf=None,
test_intersects_file=None, override_params=None,
test_topofile=None):
"""Does the actual job.
Parameters
Expand Down Expand Up @@ -62,17 +63,14 @@ def run_benchmark(rgi_version=None, rgi_reg=None, border=None,
log = logging.getLogger(__name__)

# Params
params = {}

# Local paths
if override_params is None:
override_params = {}

utils.mkdir(working_dir)
override_params['working_dir'] = working_dir

# Initialize OGGM and set up the run parameters
cfg.initialize(logging_level='WORKFLOW', params=override_params, future=True)
cfg.initialize(logging_level=logging_level, params=override_params, future=True)

# Use multiprocessing?
cfg.PARAMS['use_multiprocessing'] = True
Expand Down Expand Up @@ -126,7 +124,6 @@ def run_benchmark(rgi_version=None, rgi_reg=None, border=None,
# Input
if test_topofile:
cfg.PATHS['dem_file'] = test_topofile
utils.apply_test_ref_tstars()

# Initialize working directories
start = time.time()
Expand All @@ -146,8 +143,8 @@ def run_benchmark(rgi_version=None, rgi_reg=None, border=None,
tasks.catchment_intersections,
tasks.catchment_width_geom,
tasks.catchment_width_correction,
tasks.local_t_star,
tasks.mu_star_calibration,
tasks.mb_calibration_from_geodetic_mb,
tasks.apparent_mb_from_any_mb,
tasks.prepare_for_inversion,
tasks.mass_conservation_inversion,
tasks.filter_inversion_output,
Expand All @@ -160,15 +157,16 @@ def run_benchmark(rgi_version=None, rgi_reg=None, border=None,

# Runs
start = time.time()
workflow.execute_entity_task(tasks.run_random_climate, gdirs,
nyears=250, bias=0, seed=0,
output_filesuffix='_tstar')
_add_time_to_df(odf, 'run_random_climate_tstar_250', time.time()-start)
workflow.execute_entity_task(tasks.run_constant_climate, gdirs,
nyears=250, y0=1995,
temperature_bias=-0.5,
output_filesuffix='_constant')
_add_time_to_df(odf, 'run_constant_climate_commit_250', time.time()-start)

start = time.time()
workflow.execute_entity_task(tasks.run_random_climate, gdirs,
nyears=250, y0=1995, seed=0,
output_filesuffix='_commit')
output_filesuffix='_random')
_add_time_to_df(odf, 'run_random_climate_commit_250', time.time()-start)

# Compile results
Expand All @@ -182,12 +180,12 @@ def run_benchmark(rgi_version=None, rgi_reg=None, border=None,
_add_time_to_df(odf, 'compile_climate_statistics', time.time()-start)

start = time.time()
utils.compile_run_output(gdirs, input_filesuffix='_tstar')
_add_time_to_df(odf, 'compile_run_output_tstar', time.time()-start)
utils.compile_run_output(gdirs, input_filesuffix='_constant')
_add_time_to_df(odf, 'compile_run_output_constant', time.time()-start)

start = time.time()
utils.compile_run_output(gdirs, input_filesuffix='_commit')
_add_time_to_df(odf, 'compile_run_output_commit', time.time()-start)
utils.compile_run_output(gdirs, input_filesuffix='_random')
_add_time_to_df(odf, 'compile_run_output_random', time.time()-start)

# Log
opath = os.path.join(base_dir, 'benchmarks_b{:03d}.csv'.format(border))
Expand Down

0 comments on commit fcd8308

Please sign in to comment.