Small bugfixes in projections and warning handling (#84)
fmaussion committed Nov 9, 2017
1 parent 2afabb0 commit 36d608e
Showing 9 changed files with 48 additions and 84 deletions.
6 changes: 5 additions & 1 deletion docs/whats-new.rst
@@ -16,14 +16,18 @@ Enhancements
 - new :py:func:`~Grid.to_geometry` method, useful to compute precise
   vector to raster masks (TODO: example showing its use)
 - new projection for WRF files: polar stereographic
-- you can now add a scale bar to maps (see :py:func:`~Map.set_scale_bar`)
+- one can now add a scale bar to maps (see :py:func:`~Map.set_scale_bar`)
 - each salem version is now pinned to a certain commit of the sample-data
   repository. This is more robust and will prevent future tests from
   failing for the wrong reasons.
 
 
 Bug fixes
 ~~~~~~~~~
 
+- the cache directory is also updated when the ``pandas`` version changes
+  (:issue:`74`)
+- small bugfixes in the projections and warning handling
 
 
 v0.2.1 (07 February 2017)
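
A minimal sketch of the scale bar API mentioned in the changelog above, under
assumptions: only the length/maxlen keywords are confirmed by the
set_scale_bar signature in the salem/graphics.py diff further down, and the
demo file is one used in the test suite below.

    # Hedged sketch: draw a map with a 5 km scale bar. `length` is given
    # in map units (meters here); per the graphics.py fix below, a value
    # >= 1000 is displayed in km.
    import matplotlib.pyplot as plt
    from salem import GeoTiff, Map
    from salem.utils import get_demo_file

    grid = GeoTiff(get_demo_file('hef_srtm.tif')).grid
    smap = Map(grid)
    smap.set_scale_bar(length=5000., maxlen=0.25)
    smap.visualize()
    plt.show()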
5 changes: 5 additions & 0 deletions salem/__init__.py
@@ -34,6 +34,7 @@ def _lazy_property(self):
 
     return _lazy_property
 
+
 # Default proj
 wgs84 = pyproj.Proj(proj='latlong', datum='WGS84')
 
@@ -45,6 +46,10 @@ def _lazy_property(self):
 if not path.exists(download_dir):
     makedirs(download_dir)
 
+sample_data_gh_commit = 'aeb4cff0f61138701ab62a5b2ed2ceac7b809317'
+sample_data_dir = path.join(cache_dir, 'salem-sample-data-' +
+                            sample_data_gh_commit)
+
 # python version
 python_version = 'py3'
 if sys.version_info.major == 2:
4 changes: 3 additions & 1 deletion salem/datasets.py
@@ -421,7 +421,9 @@ def get_vardata(self, var_id=0, as_xarray=False):
             it = slice(self.sub_x[0], self.sub_x[1]+1)
             item.append(it)
 
-        out = v[tuple(item)]
+        with np.errstate(invalid='ignore'):
+            # This is due to some numpy warnings
+            out = v[tuple(item)]
 
         if as_xarray:
             # convert to xarray
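
The np.errstate context manager used in this fix is standard numpy: it
changes the floating-point error handling only inside the with-block, which
is why only the read is wrapped. A self-contained illustration:

    # np.errstate silences 'invalid value' warnings locally rather than
    # globally (as np.seterr would).
    import numpy as np

    with np.errstate(invalid='ignore'):
        r = np.sqrt(np.array([-1.0, 4.0]))   # no RuntimeWarning here
    print(r)                                 # [nan  2.]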
2 changes: 1 addition & 1 deletion salem/gis.py
@@ -1123,7 +1123,7 @@ def to_geometry(self, to_crs=None):
         This can also come in handy when doing shape-to-raster operations.
 
-        TODO: currently returns one polygon of each grid points, but this
+        TODO: currently returns one polygon for each grid point, but this
         could do more.
 
         Returns
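
The method touched here is the :py:func:`~Grid.to_geometry` announced in the
changelog above, whose own TODO notes that an example is still missing. A
hedged usage sketch — the GeoDataFrame return type is an assumption based on
the docstring wording "one polygon for each grid point":

    # Hypothetical use of Grid.to_geometry for shape-to-raster work
    # (can be slow on large grids).
    import salem
    from salem.utils import get_demo_file

    grid = salem.GeoTiff(get_demo_file('hef_srtm.tif')).grid
    gdf = grid.to_geometry()                # one polygon per grid point
    print(len(gdf), 'grid-cell polygons')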
16 changes: 7 additions & 9 deletions salem/graphics.py
@@ -42,18 +42,16 @@ class d2():
     mpl = d1()
     MPLTranform = object
 
-from salem import utils, gis, sio, Grid, wgs84, cache_dir, GeoTiff
+from salem import utils, gis, sio, Grid, wgs84, sample_data_dir, GeoTiff
 
 # Path to the file directory
-file_dir = path.join(cache_dir, 'salem-sample-data-master')
 shapefiles = dict()
-shapefiles['world_borders'] = path.join(file_dir, 'shapes', 'world_borders',
-                                        'world_borders.shp')
-shapefiles['oceans'] = path.join(file_dir, 'shapes', 'oceans',
+shapefiles['world_borders'] = path.join(sample_data_dir, 'shapes',
+                                        'world_borders', 'world_borders.shp')
+shapefiles['oceans'] = path.join(sample_data_dir, 'shapes', 'oceans',
                                  'ne_50m_ocean.shp')
-shapefiles['rivers'] = path.join(file_dir, 'shapes', 'rivers',
+shapefiles['rivers'] = path.join(sample_data_dir, 'shapes', 'rivers',
                                  'ne_50m_rivers_lake_centerlines.shp')
-shapefiles['lakes'] = path.join(file_dir, 'shapes', 'lakes',
+shapefiles['lakes'] = path.join(sample_data_dir, 'shapes', 'lakes',
                                 'ne_50m_lakes.shp')
 
 # Be sure we have the directory
@@ -972,7 +970,7 @@ def set_scale_bar(self, location=None, length=None, maxlen=0.25,
         # Units
         if self.grid.proj.is_latlong():
             units = 'deg'
-        elif length > 1000.:
+        elif length >= 1000.:
             length /= 1000
             units = 'km'
         else:
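
The second hunk replaces '>' with '>=' so that a bar of exactly 1000 m is
labelled in kilometers. A standalone rewrite of the corrected branch (the
final 'm' fallback is an assumption, since the else-branch is cut off in this
excerpt):

    def scale_units(length, is_latlong=False):
        # Mirror of the fixed unit selection in Map.set_scale_bar.
        if is_latlong:
            return length, 'deg'
        elif length >= 1000.:
            return length / 1000, 'km'   # 1000 m now reads '1 km'
        return length, 'm'               # assumed fallback

    print(scale_units(1000.))            # (1.0, 'km'); was (1000.0, 'm')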
7 changes: 4 additions & 3 deletions salem/sio.py
@@ -143,7 +143,8 @@ def _wrf_grid_from_dataset(ds):
         pargs['lat_0'] = ds.PROJ_CENTRAL_LAT
         pargs['lon_0'] = ds.PROJ_CENTRAL_LON
         pargs['center_lon'] = ds.PROJ_CENTRAL_LON
-        if ds.PROJ_NAME == 'Lambert Conformal Conic':
+        if ds.PROJ_NAME in ['Lambert Conformal Conic',
+                            'WRF Lambert Conformal']:
             proj_id = 1
         else:
             proj_id = 99  # pragma: no cover
@@ -198,8 +199,8 @@ def _wrf_grid_from_dataset(ds):
     ny = ds.dims['south_north']
     if hasattr(ds, 'PROJ_ENVI_STRING'):
         # HAR
-        x0 = ds.GRID_X00
-        y0 = ds.GRID_Y00
+        x0 = ds['west_east'][0]
+        y0 = ds['south_north'][0]
     else:
         # Normal WRF file
         e, n = gis.transform_proj(wgs84, proj, cen_lon, cen_lat)
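
The first hunk widens the projection-name check: files label the same Lambert
projection either 'Lambert Conformal Conic' or 'WRF Lambert Conformal', and
both now reach the Lambert branch. Reduced to its core, the dispatch is:

    # Sketch of the dispatch; the proj_id codes follow the diff
    # (1 = Lambert Conformal Conic, 99 = unhandled elsewhere).
    def proj_id_from_name(proj_name):
        if proj_name in ['Lambert Conformal Conic', 'WRF Lambert Conformal']:
            return 1
        return 99

    assert proj_id_from_name('WRF Lambert Conformal') == 1

The second hunk makes HAR files read the grid origin from the west_east and
south_north coordinate variables instead of the GRID_X00/GRID_Y00 global
attributes.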
9 changes: 5 additions & 4 deletions salem/tests/test_gis.py
@@ -14,9 +14,8 @@
 from salem import wgs84
 import salem.gis as gis
 from salem.utils import get_demo_file
-from salem.tests import requires_xarray, requires_shapely, requires_geopandas, \
-    requires_cartopy, requires_rasterio, python_version
-
+from salem.tests import (requires_xarray, requires_shapely, requires_geopandas,
+                         requires_cartopy, requires_rasterio, python_version)
 
 
 class SimpleNcDataSet():
@@ -779,7 +778,9 @@ def test_map_real_data(self):
 
         # 3D
         data = ncw.nc.variables['wrf_t2'][:]
-        ref_data = nct.nc.variables['t2_on_trmm_bili'][:]
+        with np.errstate(invalid='ignore'):
+            # I think this is a NetCDF4 warning
+            ref_data = nct.nc.variables['t2_on_trmm_bili'][:]
         odata = grid_to.map_gridded_data(data, grid_from, interp='linear')
        # At the borders IDL and Python make different decisions on whether it
        # should be a NaN or not (Python seems to be more conservative)
25 changes: 10 additions & 15 deletions salem/tests/test_graphics.py
@@ -19,26 +19,21 @@
 # place holder
 MPL_VERSION = '2.0.0'
 
-
 from salem.graphics import ExtendedNorm, DataLevels, Map, get_cmap, shapefiles
-from salem import Grid, wgs84, mercator_grid, GeoNetcdf, \
-    read_shapefile_to_grid, GeoTiff, GoogleCenterMap, GoogleVisibleMap, \
-    open_wrf_dataset, open_xr_dataset, python_version, cache_dir
+from salem import (Grid, wgs84, mercator_grid, GeoNetcdf,
+                   read_shapefile_to_grid, GeoTiff, GoogleCenterMap,
+                   GoogleVisibleMap, open_wrf_dataset, open_xr_dataset,
+                   python_version, cache_dir, sample_data_dir)
 from salem.utils import get_demo_file
 from salem.tests import (requires_matplotlib, requires_cartopy)
 
 # Globals
 current_dir = os.path.dirname(os.path.abspath(__file__))
 testdir = os.path.join(current_dir, 'tmp')
 
-if MPL_VERSION >= LooseVersion('2'):
-    baseline_subdir = '2.0.x'
-elif MPL_VERSION >= LooseVersion('1.5'):
-    baseline_subdir = '1.5.x'
-else:
-    raise ImportError('Matplotlib version not supported: ' + MPL_VERSION)
-baseline_dir = os.path.join(cache_dir, 'salem-sample-data-master',
-                            'baseline_images', baseline_subdir)
+baseline_subdir = '2.0.x'
+baseline_dir = os.path.join(sample_data_dir, 'baseline_images',
+                            baseline_subdir)
 
 tolpy2 = 5 if python_version == 'py3' else 10
 
@@ -508,7 +503,7 @@ def test_hef_topo_withnan():
 
 
 @requires_matplotlib
-@pytest.mark.mpl_image_compare(baseline_dir=baseline_dir, tolerance=5)
+@pytest.mark.mpl_image_compare(baseline_dir=baseline_dir, tolerance=10)
 def test_gmap():
     g = GoogleCenterMap(center_ll=(10.762660, 46.794221), zoom=13,
                         size_x=640, size_y=640)
@@ -526,7 +521,7 @@ def test_gmap():
 
 
 @requires_matplotlib
-@pytest.mark.mpl_image_compare(baseline_dir=baseline_dir)
+@pytest.mark.mpl_image_compare(baseline_dir=baseline_dir, tolerance=10)
 def test_gmap_transformed():
     dem = GeoTiff(get_demo_file('hef_srtm.tif'))
     dem.set_subset(margin=-100)
@@ -555,7 +550,7 @@ def test_gmap_transformed():
 
 
 @requires_matplotlib
-@pytest.mark.mpl_image_compare(baseline_dir=baseline_dir, tolerance=5)
+@pytest.mark.mpl_image_compare(baseline_dir=baseline_dir, tolerance=10)
 def test_gmap_llconts():
     # This was because some problems were left unnoticed by other tests
     g = GoogleCenterMap(center_ll=(11.38, 47.26), zoom=9)
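
The tolerance bumps above loosen the pytest-mpl comparisons for the Google
Maps tests, whose downloaded tiles can vary slightly between runs. For
reference, a pytest-mpl baseline test follows this pattern (run with
'pytest --mpl'; the plugin fails the test when the RMS difference to the
stored baseline image exceeds `tolerance`):

    import matplotlib.pyplot as plt
    import pytest

    @pytest.mark.mpl_image_compare(baseline_dir='baseline_images/2.0.x',
                                   tolerance=10)
    def test_simple_plot():
        # The returned figure is compared against the stored baseline.
        fig, ax = plt.subplots()
        ax.plot([0, 1], [0, 1])
        return fig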
58 changes: 8 additions & 50 deletions salem/utils.py
@@ -4,17 +4,15 @@
 from __future__ import division
 
 import io
-import json
 import os
 import shutil
-import time
 import zipfile
 from collections import OrderedDict
 
 import numpy as np
 from joblib import Memory
-from salem import cache_dir, download_dir, python_version
-from six.moves.urllib.error import HTTPError, URLError
+from salem import (cache_dir, sample_data_dir, sample_data_gh_commit,
+                   download_dir, python_version)
 from six.moves.urllib.request import urlretrieve, urlopen
 
@@ -86,6 +84,7 @@ def _hash_cache_dir():
     dirout = os.path.join(cache_dir, 'cache', strout)
     return dirout
 
+
 hash_cache_dir = _hash_cache_dir()
 memory = Memory(cachedir=hash_cache_dir + '_joblib', verbose=0)
 
@@ -188,47 +187,12 @@ def download_demo_files():
     Borrowed from OGGM.
     """
 
-    master_sha_url = 'https://api.github.com/repos/%s/commits/master' % \
-        sample_data_gh_repo
-    master_zip_url = 'https://github.com/%s/archive/master.zip' % \
-        sample_data_gh_repo
-    ofile = os.path.join(cache_dir, 'salem-sample-data.zip')
-    shafile = os.path.join(cache_dir, 'salem-sample-data-commit.txt')
+    master_zip_url = 'https://github.com/%s/archive/%s.zip' % \
+        (sample_data_gh_repo, sample_data_gh_commit)
+    ofile = os.path.join(cache_dir,
+                         'salem-sample-data-%s.zip' % sample_data_gh_commit)
     odir = os.path.join(cache_dir)
 
-    # a file containing the online file's hash and the time of last check
-    if os.path.exists(shafile):
-        with open(shafile, 'r') as sfile:
-            local_sha = sfile.read().strip()
-        last_mod = os.path.getmtime(shafile)
-    else:
-        # very first download
-        local_sha = '0000'
-        last_mod = 0
-
-    # test only every hour
-    if time.time() - last_mod > 3600:
-        write_sha = True
-        try:
-            # this might fail with HTTP 403 when server overload
-            resp = urlopen(master_sha_url)
-
-            # following try/finally is just for py2/3 compatibility
-            # https://mail.python.org/pipermail/python-list/2016-March/704073.html
-            try:
-                json_str = resp.read().decode('utf-8')
-            finally:
-                resp.close()
-            json_obj = json.loads(json_str)
-            master_sha = json_obj['sha']
-            # if not same, delete entire dir
-            if local_sha != master_sha:
-                empty_cache()
-        except (HTTPError, URLError):
-            master_sha = 'error'
-    else:
-        write_sha = False
-
     # download only if necessary
     if not os.path.exists(ofile):
        print('Downloading salem-sample-data...')
@@ -246,15 +210,9 @@ def download_demo_files():
     with zipfile.ZipFile(ofile) as zf:
         zf.extractall(odir)
 
-    # sha did change, replace
-    if write_sha:
-        with open(shafile, 'w') as sfile:
-            sfile.write(master_sha)
-
     # list of files for output
     out = dict()
-    sdir = os.path.join(cache_dir, 'salem-sample-data-master')
-    for root, directories, filenames in os.walk(sdir):
+    for root, directories, filenames in os.walk(sample_data_dir):
         for filename in filenames:
             out[filename] = os.path.join(root, filename)
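
The net effect of this change: instead of polling the GitHub API every hour
for the sample-data repository's master SHA and invalidating the cache on a
mismatch, salem now downloads an archive pinned to sample_data_gh_commit once
and reuses it. A reduced sketch of the new logic (the repository name and the
cache location are assumptions; the diff shows only the variable names):

    import os
    from six.moves.urllib.request import urlretrieve

    sample_data_gh_repo = 'fmaussion/salem-sample-data'   # assumed value
    sample_data_gh_commit = 'aeb4cff0f61138701ab62a5b2ed2ceac7b809317'
    cache_dir = os.path.expanduser('~/.salem_cache')      # assumed location

    # GitHub serves an archive of any commit at this standard endpoint.
    zip_url = 'https://github.com/%s/archive/%s.zip' % \
        (sample_data_gh_repo, sample_data_gh_commit)
    ofile = os.path.join(cache_dir,
                         'salem-sample-data-%s.zip' % sample_data_gh_commit)

    if not os.path.exists(ofile):      # download only once per commit
        urlretrieve(zip_url, ofile)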
