Merge 9b406fd into a3123b5
afisc committed Nov 8, 2022
2 parents a3123b5 + 9b406fd commit ca5f3ad
Showing 7 changed files with 130 additions and 47 deletions.
4 changes: 2 additions & 2 deletions oggm/tests/test_prepro.py
@@ -2952,7 +2952,7 @@ def test_continue_on_error(self):
entity = gpd.read_file(hef_file).iloc[0]
miniglac = shpg.Point(entity.CenLon, entity.CenLat).buffer(0.0001)
entity.geometry = miniglac
entity.RGIId = 'RGI50-11.fake'
entity.RGIId = 'RGI50-11.faked'

gdir = oggm.GlacierDirectory(entity, base_dir=self.testdir)
gis.define_glacier_region(gdir)
@@ -2969,7 +2969,7 @@ def test_continue_on_error(self):
inversion.mass_conservation_inversion(gdir)

rdir = os.path.join(self.testdir, 'RGI50-11', 'RGI50-11.fa',
'RGI50-11.fake')
'RGI50-11.faked')
self.assertTrue(os.path.exists(rdir))

rdir = os.path.join(rdir, 'log.txt')
23 changes: 13 additions & 10 deletions oggm/tests/test_utils.py
@@ -508,7 +508,9 @@ def setUp(self):

# Read in the RGI file
rgi_file = utils.get_demo_file('rgi_oetztal.shp')
self.rgidf = gpd.read_file(rgi_file).sample(4)
rgidf = gpd.read_file(rgi_file)
rgidf = rgidf.loc[['_d0' not in d for d in rgidf.RGIId]].copy()
self.rgidf = rgidf.sample(4)
cfg.PATHS['dem_file'] = utils.get_demo_file('srtm_oetztal.tif')
cfg.PATHS['working_dir'] = self.testdir
self.clean_dir()
@@ -835,11 +837,16 @@ def _read_shp(self):
inter = gpd.read_file(utils.get_demo_file('rgi_intersect_oetztal.shp'))
rgidf = gpd.read_file(utils.get_demo_file('rgi_oetztal.shp'))

rgidf['RGIId'] = [rid.replace('RGI50', 'RGI60') for rid in rgidf.RGIId]
# Some adjustments for changes in OGGM
inter['RGIId_1'] = [rid.replace('RGI50', 'RGI60')
for rid in inter.RGIId_1]
inter['RGIId_2'] = [rid.replace('RGI50', 'RGI60')
for rid in inter.RGIId_2]

# Here as well - we don't do the custom RGI IDs anymore
rgidf['RGIId'] = [rid.replace('RGI50', 'RGI60') for rid in rgidf.RGIId]
rgidf = rgidf.loc[['_d0' not in d for d in rgidf.RGIId]].copy()

return inter, rgidf

def test_parse_args(self):
@@ -1163,12 +1170,7 @@ def test_full_run(self):
new = ods.volume_fixed_geom
np.testing.assert_allclose(new.isel(time=-1),
ref.isel(time=-1),
rtol=0.02)

vn = 'volume'
np.testing.assert_allclose(ods[vn].sel(time=1990),
ods[vn].sel(time=2015),
rtol=0.3)
rtol=0.05)

for vn in ['calving', 'volume_bsl', 'volume_bwl']:
np.testing.assert_allclose(ods[vn].sel(time=1990), 0)
@@ -1193,6 +1195,7 @@ def test_elev_bands_and_spinup_run(self):
# the test glaciers only go up to 2015
run_prepro_levels(rgi_version='61', rgi_reg='11', border=border,
output_folder=odir, working_dir=wdir, is_test=True,
test_ids=['RGI60-11.00929'],
dynamic_spinup='area/dmdtda', test_rgidf=rgidf,
test_intersects_file=inter,
test_topofile=topof, elev_bands=True,
@@ -1236,7 +1239,7 @@ def test_elev_bands_and_spinup_run(self):
from oggm import tasks
from oggm.core.flowline import FlowlineModel, FileModel
cfg.PARAMS['continue_on_error'] = False
rid = df.rgi_id.iloc[1]
rid = df.rgi_id.iloc[0]
entity = rgidf.loc[rgidf.RGIId == rid].iloc[0]

# L3
@@ -1606,7 +1609,7 @@ def test_parse_args(self):
assert kwargs['border'] == 120

@pytest.mark.slow
def test_full_run(self):
def test_full_benchmark_run(self):

from oggm.cli.benchmark import run_benchmark

15 changes: 14 additions & 1 deletion oggm/tests/test_workflow.py
@@ -72,7 +72,20 @@ def up_to_climate(reset=False, use_mp=None):
rgidf.loc[1, 'GlacType'] = '0299'

# Use RGI6
rgidf['RGIId'] = [s.replace('RGI50', 'RGI60') for s in rgidf.RGIId]
new_ids = []
count = 0
for s in rgidf.RGIId:
s = s.replace('RGI50', 'RGI60')
if '_d0' in s:
# We don't do this anymore
s = 'RGI60-11.{:05d}'.format(99999 - count)
count += 1
new_ids.append(s)
rgidf['RGIId'] = new_ids


# Here as well - we don't do the custom RGI IDs anymore
rgidf = rgidf.loc[['_d0' not in d for d in rgidf.RGIId]].copy()

# Be sure data is downloaded
cru.get_cru_cl_file()
6 changes: 5 additions & 1 deletion oggm/utils/_downloads.py
@@ -2106,8 +2106,12 @@ def get_rgi_intersects_entities(rgi_ids, version=None):
version = cfg.PARAMS['rgi_version']
if len(version) == 1:
version += '0'
try:
regions = [s.split('-')[3] for s in rgi_ids]

regions = [s.split('-')[1].split('.')[0] for s in rgi_ids]
except IndexError:
# RGI V6
regions = [s.split('-')[1].split('.')[0] for s in rgi_ids]
selection = []
for reg in sorted(np.unique(regions)):
sh = gpd.read_file(get_rgi_intersects_region_file(reg,
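
For reference, the new region-parsing logic above first assumes the RGI V7 ID layout and falls back to the V6 layout when the split raises an IndexError. A minimal standalone sketch of that behaviour (the V7-style ID below is an illustrative assumption, not taken from this commit):

def guess_o1_regions(rgi_ids):
    """Extract the O1 region code from RGI V6- or V7-style identifiers."""
    try:
        # Assumed RGI V7 layout, e.g. 'RGI2000-v7.0-G-11-00897' -> '11'
        return [s.split('-')[3] for s in rgi_ids]
    except IndexError:
        # RGI V6 layout, e.g. 'RGI60-11.00897' -> '11'
        return [s.split('-')[1].split('.')[0] for s in rgi_ids]

print(guess_o1_regions(['RGI2000-v7.0-G-11-00897']))  # ['11']
print(guess_o1_regions(['RGI60-11.00897']))           # ['11']
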
15 changes: 12 additions & 3 deletions oggm/utils/_funcs.py
@@ -32,7 +32,7 @@
# Locals
import oggm.cfg as cfg
from oggm.cfg import SEC_IN_YEAR, SEC_IN_MONTH
from oggm.utils._downloads import get_demo_file
from oggm.utils._downloads import get_demo_file, file_downloader
from oggm.exceptions import InvalidParamsError, InvalidGeometryError

# Module logger
@@ -68,12 +68,21 @@ def parse_rgi_meta(version=None):
return _RGI_METADATA[version]

# Parse RGI metadata
reg_names = pd.read_csv(get_demo_file('rgi_regions.csv'), index_col=0)
if version in ['4', '5']:
if version == '7':
reg_names = gpd.read_file(file_downloader('https://cluster.klima.uni-bremen.de/~fmaussion/misc/rgi7_data/00_rgi70_regions/00_rgi70_O1Regions/00_rgi70_O1Regions.dbf'))
reg_names.index = reg_names['o1region'].astype(int)
reg_names = reg_names['full_name']
subreg_names = gpd.read_file(file_downloader('https://cluster.klima.uni-bremen.de/~fmaussion/misc/rgi7_data/00_rgi70_regions/00_rgi70_O2Regions/00_rgi70_O2Regions.dbf'))
subreg_names.index = subreg_names['o2region']
subreg_names = subreg_names['full_name']

elif version in ['4', '5']:
reg_names = pd.read_csv(get_demo_file('rgi_regions.csv'), index_col=0)
# The files were different back then
subreg_names = pd.read_csv(get_demo_file('rgi_subregions_V5.csv'),
index_col=0)
else:
reg_names = pd.read_csv(get_demo_file('rgi_regions.csv'), index_col=0)
f = os.path.join(get_demo_file('rgi_subregions_'
'V{}.csv'.format(version)))
subreg_names = pd.read_csv(f)
98 changes: 71 additions & 27 deletions oggm/utils/_workflow.py
@@ -2549,7 +2549,7 @@ def __init__(self, rgi_entity, base_dir=None, reset=False,
if isinstance(rgi_entity, str):
# Get the meta from the shape file directly
if from_tar:
_dir = os.path.join(base_dir, rgi_entity[:8], rgi_entity[:11],
_dir = os.path.join(base_dir, rgi_entity[:-6], rgi_entity[:-3],
rgi_entity)
# Avoid bad surprises
if os.path.exists(_dir):
@@ -2560,7 +2560,7 @@ def __init__(self, rgi_entity, base_dir=None, reset=False,
from_tar = False # to not re-unpack later below
_shp = os.path.join(_dir, 'outlines.shp')
else:
_shp = os.path.join(base_dir, rgi_entity[:8], rgi_entity[:11],
_shp = os.path.join(base_dir, rgi_entity[:-6], rgi_entity[:-3],
rgi_entity, 'outlines.shp')
rgi_entity = self._read_shapefile_from_path(_shp)
crs = salem.check_crs(rgi_entity.crs)
@@ -2586,52 +2586,97 @@ def __init__(self, rgi_entity, base_dir=None, reset=False,
except AttributeError:
pass

self.rgi_id = rgi_entity.RGIId
self.glims_id = rgi_entity.GLIMSId



try:
self.rgi_id = rgi_entity.rgi_id
self.glims_id = rgi_entity.glims_id
except AttributeError:
# RGI V6
self.rgi_id = rgi_entity.RGIId
self.glims_id = rgi_entity.GLIMSId
# Do we want to use the RGI center point or ours?
if cfg.PARAMS['use_rgi_area']:
self.cenlon = float(rgi_entity.CenLon)
self.cenlat = float(rgi_entity.CenLat)
try:
self.cenlon = float(rgi_entity.cenlon)
self.cenlat = float(rgi_entity.cenlat)
except AttributeError:
# RGI V6
self.cenlon = float(rgi_entity.CenLon)
self.cenlat = float(rgi_entity.CenLat)
else:
cenlon, cenlat = rgi_entity.geometry.representative_point().xy
self.cenlon = float(cenlon[0])
self.cenlat = float(cenlat[0])

self.rgi_region = '{:02d}'.format(int(rgi_entity.O1Region))
self.rgi_subregion = (self.rgi_region + '-' +
'{:02d}'.format(int(rgi_entity.O2Region)))
name = rgi_entity.Name
rgi_datestr = rgi_entity.BgnDate
try:
self.rgi_region = rgi_entity.o1region
self.rgi_subregion = rgi_entity.o2region
except AttributeError:
# RGI V6
self.rgi_region = '{:02d}'.format(int(rgi_entity.O1Region))
self.rgi_subregion = (self.rgi_region + '-' +
'{:02d}'.format(int(rgi_entity.O2Region)))


try:
gtype = rgi_entity.GlacType
name = str(rgi_entity.name)
rgi_datestr = rgi_entity.src_date
except AttributeError:
# RGI V6
gtype = [str(rgi_entity.Form), str(rgi_entity.TermType)]
name = rgi_entity.Name
rgi_datestr = rgi_entity.BgnDate


try:
gtype = rgi_entity.GlacType
except AttributeError:
try:
# RGI V6
gtype = [str(rgi_entity.Form), str(rgi_entity.TermType)]
except AttributeError:
# temporary default for RGI V7:
gtype = ['0', '0']


try:
gstatus = rgi_entity.RGIFlag[0]
except AttributeError:
# RGI V6
gstatus = rgi_entity.Status
try:
# RGI V6
gstatus = rgi_entity.Status
except AttributeError:
# temporary default for RGI V7:
gstatus = '0'

# rgi version can be useful
self.rgi_version = self.rgi_id.split('-')[0][-2:]
if self.rgi_version not in ['50', '60', '61']:
raise RuntimeError('RGI Version not supported: '
'{}'.format(self.rgi_version))

rgi_version = self.rgi_id.split('-')[1][1] + self.rgi_id.split('-')[1][3]
if rgi_version == '70':
self.rgi_version = rgi_version
else:
rgi_version = self.rgi_id.split('-')[0][-2:]
if rgi_version not in ['50', '60', '61']:
raise RuntimeError('RGI Version not supported: '
'{}'.format(self.rgi_version))
else:
self.rgi_version = rgi_version
# remove spurious characters and trailing blanks
self.name = filter_rgi_name(name)

# region
reg_names, subreg_names = parse_rgi_meta(version=self.rgi_version[0])
n = reg_names.loc[int(self.rgi_region)].values[0]
self.rgi_region_name = self.rgi_region + ': ' + n
reg_name = reg_names.loc[int(self.rgi_region)]
# RGI V6
if not isinstance(reg_name, str):
reg_name = reg_name.values[0]
self.rgi_region_name = self.rgi_region + ': ' + reg_name
try:
n = subreg_names.loc[self.rgi_subregion].values[0]
self.rgi_subregion_name = self.rgi_subregion + ': ' + n
subreg_name = subreg_names.loc[self.rgi_subregion]
# RGI V6
if not isinstance(subreg_name, str):
subreg_name = subreg_name.values[0]
self.rgi_subregion_name = self.rgi_subregion + ': ' + subreg_name
except KeyError:
self.rgi_subregion_name = self.rgi_subregion + ': NoName'

@@ -2690,11 +2735,10 @@ def __init__(self, rgi_entity, base_dir=None, reset=False,
if rgi_date < 0:
rgi_date = RGI_DATE[self.rgi_region]
self.rgi_date = rgi_date

# Root directory
self.base_dir = os.path.normpath(base_dir)
self.dir = os.path.join(self.base_dir, self.rgi_id[:8],
self.rgi_id[:11], self.rgi_id)
self.dir = os.path.join(self.base_dir, self.rgi_id[:-6],
self.rgi_id[:-3], self.rgi_id)

# Do we have to extract the files first?
if (reset or from_tar) and os.path.exists(self.dir):
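
The two recurring changes in this file are the new version sniffing (read the version from the second ID token when it encodes '70', otherwise keep the old 'RGI50/60/61' prefix check) and the directory layout, which now trims the last 6 and 3 characters of the ID instead of using hard-coded [:8] and [:11] slices. A minimal sketch of both, assuming V7 IDs of the form 'RGI2000-v7.0-G-11-00897' (illustrative value only):

import os


def rgi_version_from_id(rgi_id):
    """Guess the RGI version string ('50', '60', '61' or '70') from an ID."""
    tok = rgi_id.split('-')[1]
    if tok[1] + tok[3] == '70':          # 'v7.0' -> '7' + '0'
        return '70'
    version = rgi_id.split('-')[0][-2:]  # 'RGI60-...' -> '60'
    if version not in ['50', '60', '61']:
        raise RuntimeError('RGI Version not supported: {}'.format(version))
    return version


def gdir_path(base_dir, rgi_id):
    """Build the two nesting levels by trimming the last 6 / 3 characters."""
    return os.path.join(base_dir, rgi_id[:-6], rgi_id[:-3], rgi_id)


print(rgi_version_from_id('RGI60-11.00897'))           # '60'
print(rgi_version_from_id('RGI2000-v7.0-G-11-00897'))  # '70'
print(gdir_path('/tmp/wd', 'RGI60-11.00897'))
# -> /tmp/wd/RGI60-11/RGI60-11.00/RGI60-11.00897 (on POSIX)
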
16 changes: 13 additions & 3 deletions oggm/workflow.py
@@ -425,15 +425,25 @@ def init_glacier_directories(rgidf=None, *, reset=False, force=False,
len(cfg.PARAMS['intersects_gdf']) == 0 and
not from_tar):
try:
rgi_ids = np.unique(np.sort([entity.RGIId for entity in
rgi_ids = np.unique(np.sort([entity.rgi_id for entity in
entities]))
rgi_version = rgi_ids[0].split('-')[0][-2:]
fp = utils.get_rgi_intersects_entities(rgi_ids,
version=rgi_version)
cfg.set_intersects_db(fp)
except AttributeError:
# List of str
pass
# RGI V6
try:
rgi_ids = np.unique(np.sort([entity.RGIId for entity in
entities]))
rgi_version = rgi_ids[0].split('-')[0][-2:]
fp = utils.get_rgi_intersects_entities(rgi_ids,
version=rgi_version)
cfg.set_intersects_db(fp)
except AttributeError:
# List of str
pass


if _isdir(from_tar):
gdirs = execute_entity_task(gdir_from_tar, entities,
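
The hunk above nests a second try/except so that entities carrying the V7 attribute name (rgi_id) are handled first, V6 entities (RGIId) second, and plain ID strings fall through silently. A minimal sketch of that fallback pattern (the helper name and None return are illustrative):

import numpy as np


def unique_rgi_ids(entities):
    """Collect sorted unique RGI IDs from V7 or V6 entities, else None."""
    try:
        return np.unique(np.sort([e.rgi_id for e in entities]))     # RGI V7
    except AttributeError:
        try:
            return np.unique(np.sort([e.RGIId for e in entities]))  # RGI V6
        except AttributeError:
            return None  # list of plain ID strings: nothing to read
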
