Merge pull request #8 from OGGM/oggm-dev
Test entire workflow and documentation skeleton
fmaussion committed Nov 12, 2015
2 parents 4d84b24 + 626fd24 commit f1a923e
Showing 21 changed files with 1,145 additions and 130 deletions.
716 changes: 716 additions & 0 deletions docs/Getting_started.ipynb

Large diffs are not rendered by default.

78 changes: 45 additions & 33 deletions oggm/conf.py
@@ -11,41 +11,44 @@

import sys
import os
from configobj import ConfigObj, ConfigObjError
import logging
from collections import OrderedDict

from configobj import ConfigObj, ConfigObjError
import geopandas as gpd
import pandas as pd
import pickle
import glob
import netCDF4
import shutil
import numpy as np

from salem import lazy_property

# Fiona is a spammer
logging.getLogger("Fiona").setLevel(logging.WARNING)

# Defaults
logging.basicConfig(format='%(asctime)s: %(name)s: %(message)s',
datefmt='%Y-%m-%d %H:%M:%S', level=logging.INFO)

# Local logger
log = logging.getLogger(__name__)

# Globals definition to assist the IDE auto-completion
is_initialized = False
params = OrderedDict()
input = OrderedDict()
output = OrderedDict()
use_divides = False
paths = OrderedDict()
use_mp = False
nproc = -1
temp_use_local_gradient = False

def initialize(file=None):
"""Read the parameter configfile"""

global is_initialized
global params
global input
global output
global paths
global use_mp
global use_divides
global nproc
global temp_use_local_gradient
if file is None:
file = os.path.join(os.path.abspath(os.path.dirname(__file__)),
'params.cfg')
@@ -60,59 +63,68 @@ def initialize(file=None):

homedir = os.path.expanduser("~")

if cp['input_data_dir'] == '~':
cp['input_data_dir'] = ''

if cp['working_dir'] == '~':
cp['working_dir'] = os.path.join(homedir, 'OGGM_wd')

input['data_dir'] = cp['input_data_dir']
output['working_dir'] = cp['working_dir']
params['grid_dx_method'] = cp['grid_dx_method']
params['topo_interp'] = cp['topo_interp']
paths['working_dir'] = cp['working_dir']

log.info('Input data directory: %s', input['data_dir'])
log.info('Working directory: %s', output['working_dir'])
paths['srtm_file'] = cp['srtm_file']
paths['histalp_file'] = cp['histalp_file']
paths['wgms_rgi_links'] = cp['wgms_rgi_links']
paths['glathida_rgi_links'] = cp['glathida_rgi_links']

# Divides
use_divides = cp.as_bool('use_divides')
log.info('Working directory: %s', paths['working_dir'])

# Multiprocessing pool
use_mp = cp.as_bool('multiprocessing')
nproc = cp.as_int('processes')
if nproc == -1:
nproc = None
if use_mp:
log.info('Multiprocessing run')
else:
log.info('No multiprocessing')

# Climate
temp_use_local_gradient = cp.as_bool('temp_use_local_gradient')
# Some non-trivial params
params['grid_dx_method'] = cp['grid_dx_method']
params['topo_interp'] = cp['topo_interp']
params['use_divides'] = cp.as_bool('use_divides')

# Climate
params['temp_use_local_gradient'] = cp.as_bool('temp_use_local_gradient')
k = 'temp_local_gradient_bounds'
params[k] = [float(vk) for vk in cp.as_list(k)]

# Delete non-floats
for k in ['input_data_dir', 'working_dir', 'grid_dx_method',
for k in ['working_dir', 'srtm_file', 'histalp_file', 'wgms_rgi_links',
'glathida_rgi_links', 'grid_dx_method',
'multiprocessing', 'processes', 'use_divides',
'temp_use_local_gradient', 'temp_local_gradient_bounds',
'topo_interp']:
del cp[k]

# Other params are floats
for k in cp:
params[k] = cp.as_float(k)

data_dir = input['data_dir']
input['srtm_file'] = os.path.join(data_dir, 'GIS', 'srtm_90m_v4_alps.tif')
input['histalp_file'] = os.path.join(data_dir, 'CLIMATE' ,
'histalp_merged.nc')
# Empty defaults
set_divides_db()
is_initialized = True


def set_divides_db(path):
def set_divides_db(path=None):

# Read divides database
if use_divides:
input['divides_gdf'] = gpd.GeoDataFrame.from_file(path).set_index('RGIID')
if params['use_divides'] and path is not None:
params['divides_gdf'] = gpd.GeoDataFrame.from_file(path).set_index('RGIID')
else:
input['divides_gdf'] = gpd.GeoDataFrame()
params['divides_gdf'] = gpd.GeoDataFrame()

def reset_working_dir():
"""To use with caution"""
if os.path.exists(paths['working_dir']):
shutil.rmtree(paths['working_dir'])
os.makedirs(paths['working_dir'])

class GlacierDir(object):
"""A glacier directory is a simple class to help access the working files.
@@ -139,7 +151,7 @@ def __init__(self, entity, base_dir=None, reset=False):
"""

if base_dir is None:
base_dir = os.path.join(output['working_dir'], 'per_glacier')
base_dir = os.path.join(paths['working_dir'], 'per_glacier')

self.rgi_id = entity.RGIID
self.glims_id = entity.GLIMSID
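
Note on the new interface: conf.py folds the old input/output dicts into cfg.paths (file locations) and cfg.params (numeric and boolean settings). A minimal usage sketch (the file paths below are placeholders, not shipped defaults):

import oggm.conf as cfg

# Parse params.cfg and fill cfg.paths / cfg.params
cfg.initialize()

# Point the new path entries to local data (placeholder locations)
cfg.paths['srtm_file'] = '/data/GIS/srtm_90m_v4_alps.tif'
cfg.paths['histalp_file'] = '/data/CLIMATE/histalp_merged.nc'

# Load a divides database (a shapefile with an RGIID column);
# called without a path it falls back to an empty GeoDataFrame
cfg.set_divides_db('/data/divides.shp')

# Delete and recreate paths['working_dir'] ("to use with caution")
cfg.reset_working_dir()
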
24 changes: 6 additions & 18 deletions oggm/graphics.py
@@ -35,36 +35,24 @@ def plot_googlemap(glacierdir, ax=None):

# TODO: center grid or corner grid???
crs = glacierdir.grid.center_grid
geom = glacierdir.read_pickle('geometries')

dofig = False
if ax is None:
fig = plt.figure()
ax = fig.add_subplot(111)
dofig = True

g = geom['polygon_hr']
gm = salem.GoogleVisibleMap(np.array(g.exterior.xy[0]),
np.array(g.exterior.xy[1]),
src=crs)
s = salem.utils.read_shapefile(glacierdir.get_filepath('outlines'))
gm = salem.GoogleVisibleMap(np.array(s.geometry[0].exterior.xy[0]),
np.array(s.geometry[0].exterior.xy[1]),
src=s.crs)

img = gm.get_vardata()
cmap = cleo.Map(gm.grid, countries=False, nx=gm.grid.nx)
cmap.set_lonlat_countours(0.02)
cmap.set_rgb(img)

# TODO: center grid or corner grid???
crs = glacierdir.grid.center_grid
for i in glacierdir.divide_ids:
geom = glacierdir.read_pickle('geometries', div_id=i)

# Plot boundaries
poly_pix = geom['polygon_pix']

cmap.set_geometry(poly_pix, crs=crs, fc='white',
alpha=0.3, zorder=2, linewidth=.2)
for l in poly_pix.interiors:
cmap.set_geometry(l, crs=crs,
color='black', linewidth=0.5)
cmap.set_shapefile(glacierdir.get_filepath('outlines'))

cmap.plot(ax)
ax.set_title(glacierdir.rgi_id)
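
plot_googlemap no longer unpickles the high-resolution geometry: it reads the written 'outlines' shapefile back from disk and draws it with cmap.set_shapefile. The call itself is unchanged; a sketch, assuming gdir is a fully processed GlacierDir:

import matplotlib.pyplot as plt
from oggm import graphics

fig = plt.figure()
ax = fig.add_subplot(111)
graphics.plot_googlemap(gdir, ax=ax)  # gdir: a GlacierDir with 'outlines' on disk
plt.show()
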
9 changes: 6 additions & 3 deletions oggm/params.cfg
@@ -1,16 +1,19 @@
# Configuration file for OGGM parameters

# Paths. Set to ~ for defaults
input_data_dir = ~
working_dir = ~
srtm_file = ~
histalp_file = ~
wgms_rgi_links = ~
glathida_rgi_links = ~

# Consider the divides?
# Consider the glacier divides?
use_divides = True

# Multiprocessing
multiprocessing = True
# Number of processors to use (-1 = all available)
processes = 2
processes = -1

### CENTERLINE determination

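
Two semantics worth noting: working_dir = ~ falls back to ~/OGGM_wd in initialize(), and processes = -1 now becomes nproc = None, i.e. use all available processors. A filled-in path block might look like this (values are illustrative):

# Paths. Set to ~ for defaults
working_dir = /scratch/OGGM_wd
srtm_file = /data/GIS/srtm_90m_v4_alps.tif
histalp_file = /data/CLIMATE/histalp_merged.nc
wgms_rgi_links = /data/RGI_linkings/WGMS/wgms_rgi_links.csv
glathida_rgi_links = /data/RGI_linkings/GLATHIDA_to_RGI_Alps.csv
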
40 changes: 16 additions & 24 deletions oggm/prepro/climate.py
@@ -37,12 +37,10 @@ def distribute_climate_data(gdirs):
log.info('Distribute climate data')

# read the file and data entirely (faster than many I/O)
nc = netCDF4.Dataset(cfg.input['histalp_file'], mode='r')
ncpath = cfg.paths['histalp_file']
nc = netCDF4.Dataset(ncpath, mode='r')
lon = nc.variables['lon'][:]
lat = nc.variables['lat'][:]
temp = nc.variables['temp'][:]
prcp = nc.variables['prcp'][:]
hgt = nc.variables['hgt'][:]

# Time
time = nc.variables['time']
@@ -59,25 +57,15 @@
# Gradient defaults
def_grad = -0.0065
g_minmax = cfg.params['temp_local_gradient_bounds']
sf = cfg.params['prcp_scaling_factor']

for gdir in gdirs:
ilon = np.argmin(np.abs(lon - gdir.cenlon))
ilat = np.argmin(np.abs(lat - gdir.cenlat))

igrad = np.zeros(len(time)) + def_grad
if cfg.temp_use_local_gradient:
itemp = temp[:, ilat-1:ilat+2, ilon-1:ilon+2]
ihgt = hgt[ilat-1:ilat+2, ilon-1:ilon+2]
for t, loct in enumerate(itemp):
slope, _, _, p_val, _ = stats.linregress(ihgt.flatten(),
loct.flatten())
igrad[t] = slope if (p_val < 0.01) else def_grad
# don't exaggerate too much
igrad = np.clip(igrad, g_minmax[0], g_minmax[1])

itemp = temp[:, ilat, ilon]
iprcp = prcp[:, ilat, ilon] * cfg.params['prcp_scaling_factor']
ihgt = hgt[ilat, ilon]
iprcp, itemp, igrad, ihgt = utils.joblib_read_climate(ncpath, ilon,
ilat, def_grad,
g_minmax,
sf)
gdir.create_monthly_climate_file(time, iprcp, itemp, igrad, ihgt)
nc.close()
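
The per-glacier read and gradient fit moved into utils.joblib_read_climate, presumably so that joblib can cache or parallelize the repeated NetCDF reads. Reconstructed from the removed inline code, the helper does roughly the following (a sketch, not the shipped implementation; the temp_use_local_gradient switch is omitted):

import netCDF4
import numpy as np
from scipy import stats

def joblib_read_climate(ncpath, ilon, ilat, def_grad, g_minmax, sf):
    """Sketch: monthly climate time series at one HISTALP grid point."""
    nc = netCDF4.Dataset(ncpath, mode='r')
    temp = nc.variables['temp'][:]
    prcp = nc.variables['prcp'][:]
    hgt = nc.variables['hgt'][:]
    nc.close()

    itemp = temp[:, ilat, ilon]
    iprcp = prcp[:, ilat, ilon] * sf  # sf = prcp_scaling_factor
    ihgt = hgt[ilat, ilon]

    # Local temperature gradient: regress T on elevation over the 3x3
    # neighbourhood, keep the slope only where significant, then clip
    igrad = np.zeros(len(itemp)) + def_grad
    ttemp = temp[:, ilat-1:ilat+2, ilon-1:ilon+2]
    thgt = hgt[ilat-1:ilat+2, ilon-1:ilon+2].flatten()
    for t, loct in enumerate(ttemp):
        slope, _, _, p_val, _ = stats.linregress(thgt, loct.flatten())
        igrad[t] = slope if (p_val < 0.01) else def_grad
    igrad = np.clip(igrad, g_minmax[0], g_minmax[1])

    return iprcp, itemp, igrad, ihgt
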

@@ -403,7 +391,6 @@ def local_mustar_apparent_mb(gdir, tstar, bias):
assert len(years) == (2 * mu_hp + 1)
mustar = np.mean(prcp_yr) / np.mean(temp_yr)
if not np.isfinite(mustar):
gr.plot_centerlines(gdir)
raise RuntimeError('{} has an infinite mu'.format(gdir.rgi_id))

# Scalars in a small dataframe for later
@@ -447,7 +434,7 @@ def distribute_t_stars(gdirs):

log.info('Distribute t* and mu*')

ref_df = pd.read_csv(os.path.join(cfg.output['working_dir'],
ref_df = pd.read_csv(os.path.join(cfg.paths['working_dir'],
'ref_tstars.csv'))

for gdir in gdirs:
@@ -486,7 +473,8 @@ def compute_ref_t_stars(gdirs):
log.info('Compute the reference t* and mu*')

# Loop
mbdatadir = os.path.join(cfg.input['data_dir'], 'RGI_linkings', 'WGMS')
mbdatadir = os.path.join(os.path.dirname(cfg.paths['wgms_rgi_links']),
'WGMS')
only_one = [] # start to store the glaciers with just one t*
per_glacier = dict()
for gdir in gdirs:
@@ -498,7 +486,11 @@
per_glacier[gdir.rgi_id] = (gdir, t_star, res_bias)

if len(only_one) == 0:
raise RuntimeError('Didnt expect to be here.')
# TODO: hardcoded shit here
only_one.append('RGI40-11.00887')
gdir, t_star, res_bias = per_glacier['RGI40-11.00887']
per_glacier['RGI40-11.00887'] = (gdir, [t_star[-1]], [res_bias[-1]])
# raise RuntimeError('Didnt expect to be here.')

# Ok. now loop over the glaciers until all have a unique t*
while True:
@@ -547,5 +539,5 @@
df['bias'] = biases
df['lon'] = lons
df['lat'] = lats
file = os.path.join(cfg.output['working_dir'], 'ref_tstars.csv')
file = os.path.join(cfg.paths['working_dir'], 'ref_tstars.csv')
df.sort_index().to_csv(file)
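
Order matters in this module: compute_ref_t_stars writes ref_tstars.csv into the working directory, and distribute_t_stars reads it back, so the former must run first. The round trip in short (column names as in the diff):

import os
import pandas as pd
import oggm.conf as cfg

fpath = os.path.join(cfg.paths['working_dir'], 'ref_tstars.csv')
ref_df = pd.read_csv(fpath)  # one row per reference glacier: bias, lon, lat
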
4 changes: 2 additions & 2 deletions oggm/prepro/gis.py
@@ -317,7 +317,7 @@ def define_glacier_region(glacierdir, entity):
towrite.to_file(glacierdir.get_filepath('outlines'))

# Open SRTM
srtm = gdal.Open(cfg.input['srtm_file'])
srtm = gdal.Open(cfg.paths['srtm_file'])
geo_t = srtm.GetGeoTransform()

# GDAL proj
@@ -376,7 +376,7 @@ def define_glacier_region(glacierdir, entity):
glacierdir.write_pickle(glacier_grid, 'glacier_grid')

# Looks in the database if the glacier has divides.
gdf = cfg.input['divides_gdf']
gdf = cfg.params['divides_gdf']
if glacierdir.rgi_id in gdf.index.values:
divdf = [g for g in gdf.loc[glacierdir.rgi_id].geometry]
log.debug('%s: number of divides: %d', glacierdir.rgi_id, len(divdf))
7 changes: 3 additions & 4 deletions oggm/prepro/inversion.py
@@ -226,9 +226,8 @@ def optimize_inversion_params(gdirs):
log.info('Compute the reference fs and fd parameters.')

# Get test glaciers (all glaciers with thickness data)
dfids = os.path.join(cfg.input['data_dir'], 'RGI_linkings',
'GLATHIDA_to_RGI_Alps.csv')
gtd_df = pd.read_csv(dfids).sort(columns=['RGI_ID'])
dfids = cfg.paths['glathida_rgi_links']
gtd_df = pd.read_csv(dfids).sort_values(by=['RGI_ID'])
dfids = gtd_df['RGI_ID'].values

ref_gdirs = [gdir for gdir in gdirs if gdir.rgi_id in dfids]
@@ -272,7 +271,7 @@ def to_optimize(x):
'{vol_rmsd}'.format(**d))

df = pd.DataFrame(d, index=[0])
file = os.path.join(cfg.output['working_dir'], 'inversion_params.csv')
file = os.path.join(cfg.paths['working_dir'], 'inversion_params.csv')
df.to_csv(file)

return fs, fd
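
Besides taking the GLATHIDA links from cfg.paths, this hunk swaps out DataFrame.sort(columns=...), deprecated in pandas 0.17, for its replacement sort_values(by=...):

import pandas as pd

df = pd.DataFrame({'RGI_ID': ['RGI40-11.00002', 'RGI40-11.00001']})
df = df.sort_values(by=['RGI_ID'])  # replaces deprecated df.sort(columns=['RGI_ID'])
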
Binary file modified oggm/tests/baseline_images/test_graphics/test_centerlines.png
Binary file modified oggm/tests/baseline_images/test_graphics/test_downstream.png
Binary file modified oggm/tests/baseline_images/test_graphics/test_downstream_cls.png
Binary file modified oggm/tests/baseline_images/test_graphics/test_flowlines.png
Binary file modified oggm/tests/baseline_images/test_graphics/test_googlestatic.png
Binary file modified oggm/tests/baseline_images/test_graphics/test_inversion.png
Binary file modified oggm/tests/baseline_images/test_graphics/test_width.png
Binary file modified oggm/tests/baseline_images/test_graphics/test_width_corrected.png
12 changes: 2 additions & 10 deletions oggm/tests/test_graphics.py
@@ -21,21 +21,13 @@
import oggm.conf as cfg
from oggm.utils import get_demo_file
from oggm import graphics
import logging
from xml.dom import minidom
import salem
import pandas as pd
import geopandas as gpd

# Globals
current_dir = os.path.dirname(os.path.abspath(__file__))
testdir = os.path.join(current_dir, 'tmp')

logging.getLogger("Fiona").setLevel(logging.WARNING)
logging.basicConfig(format='%(asctime)s: %(name)s: %(message)s',
datefmt='%Y-%m-%d %H:%M:%S',
level=logging.DEBUG)

# For Windows
suffix = ''
if 'win' in sys.platform:
@@ -57,8 +49,8 @@ def init_hef(reset=False):
# Init
cfg.initialize()
cfg.set_divides_db(get_demo_file('HEF_divided.shp'))
cfg.input['srtm_file'] = get_demo_file('hef_srtm.tif')
cfg.input['histalp_file'] = get_demo_file('histalp_merged_hef.nc')
cfg.paths['srtm_file'] = get_demo_file('hef_srtm.tif')
cfg.paths['histalp_file'] = get_demo_file('histalp_merged_hef.nc')
cfg.params['border'] = 40

# loop because for some reason indexing won't work