Skip to content

Commit

Permalink
Pickles are now updated when pandas version changes (#75)
Browse files Browse the repository at this point in the history
  • Loading branch information
fmaussion committed May 7, 2017
1 parent dc2f5ac commit 5385465
Show file tree
Hide file tree
Showing 4 changed files with 34 additions and 16 deletions.
2 changes: 2 additions & 0 deletions .travis.yml
Original file line number Diff line number Diff line change
Expand Up @@ -28,6 +28,8 @@ matrix:
- python: 3.5
env: CONDA_ENV=py35-all-rc MPL=--mpl
allow_failures:
- python: 3.5
env: CONDA_ENV=py35-xr MPL=
- python: 3.5
env: CONDA_ENV=py35-xarray-dev MPL=--mpl
- python: 3.5
Expand Down
11 changes: 10 additions & 1 deletion docs/whats-new.rst
Original file line number Diff line number Diff line change
Expand Up @@ -11,8 +11,17 @@ Enhancements
~~~~~~~~~~~~

- :py:func:`~transform_geopandas` can now handle grid to proj transformations.
- New :py:func:`~reduce` function, usefull to aggregate structured high-res
- New :py:func:`~reduce` function, useful to aggregate structured high-res
grids to lower-res grids (:ref:`sphx_glr_auto_examples_plot_subgrid_mask.py`)
- New :py:func:`~Grid.to_geometry` method, useful to compute precise
  vector-to-raster masks (TODO: example showing its use)


Bug fixes
~~~~~~~~~

- the cache directory is also updated when the ``pandas`` version changes
(:issue:`74`)


v0.2.1 (07 February 2017)
Expand Down
18 changes: 10 additions & 8 deletions salem/tests/test_misc.py
Original file line number Diff line number Diff line change
Expand Up @@ -27,8 +27,6 @@

current_dir = os.path.dirname(os.path.abspath(__file__))
testdir = os.path.join(current_dir, 'tmp')
if not os.path.exists(testdir):
os.makedirs(testdir)


@requires_geopandas
Expand Down Expand Up @@ -69,9 +67,9 @@ def tearDown(self):
def test_empty_cache(self):
    # Smoke test: clearing the sample-data cache must complete without raising.
    utils.empty_cache()

def test_joblibcache(self):
h1 = utils._joblib_cache_dir()
h2 = utils._joblib_cache_dir()
def test_hash_cache_dir(self):
    # The environment-hash cache path must be deterministic: two calls made
    # in the same (unchanged) environment return the exact same directory.
    h1 = utils._hash_cache_dir()
    h2 = utils._hash_cache_dir()
    self.assertEqual(h1, h2)

def test_demofiles(self):
Expand Down Expand Up @@ -496,6 +494,13 @@ def test_projplot(self):

class TestXarray(unittest.TestCase):

def setUp(self):
    # Create the temporary test directory before each test; previously this
    # was done once at module import time (see removed lines above).
    if not os.path.exists(testdir):
        os.makedirs(testdir)

def tearDown(self):
    # Clean up after each test; delete_test_dir is a module-level helper
    # (defined elsewhere in this file) — presumably removes `testdir`.
    delete_test_dir()

@requires_xarray
def test_era(self):

Expand Down Expand Up @@ -905,9 +910,6 @@ def test_mf_datasets(self):

import xarray as xr

if not os.path.exists(testdir):
os.makedirs(testdir)

# prepare the data
f = get_demo_file('wrf_d01_allvars_cropped.nc')
ds = xr.open_dataset(f)
Expand Down
19 changes: 12 additions & 7 deletions salem/utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -18,8 +18,8 @@
from six.moves.urllib.request import urlretrieve, urlopen


def _joblib_cache_dir():
"""Get the path to the right joblib directory.
def _hash_cache_dir():
"""Get the path to the right cache directory.
We need to make sure that cached files correspond to the same
environment. To this end we make a unique directory hash, depending on the
Expand Down Expand Up @@ -47,6 +47,12 @@ def _joblib_cache_dir():
out['fiona_file'] = fiona.__file__
except ImportError:
pass
try:
import pandas
out['pandas_version'] = pandas.__version__
out['pandas_file'] = pandas.__file__
except ImportError:
pass
try:
import geopandas
out['geopandas_version'] = geopandas.__version__
Expand Down Expand Up @@ -77,12 +83,11 @@ def _joblib_cache_dir():
for k, v in out.items():
strout += k + v
strout = 'salem_hash_' + hashlib.md5(strout.encode()).hexdigest()
dirout = os.path.join(cache_dir, 'joblib', strout)
if not os.path.exists(dirout):
os.makedirs(dirout)
dirout = os.path.join(cache_dir, 'cache', strout)
return dirout

memory = Memory(cachedir=_joblib_cache_dir(), verbose=0)
hash_cache_dir = _hash_cache_dir()
memory = Memory(cachedir=hash_cache_dir + '_joblib', verbose=0)

# A series of variables and dimension names that Salem will understand
valid_names = dict()
Expand Down Expand Up @@ -144,7 +149,7 @@ def cached_shapefile_path(fpath):

# Cached directory and file
cp = os.path.commonprefix([cache_dir, p])
cp = os.path.join(cache_dir, python_version + '_cache',
cp = os.path.join(cache_dir, hash_cache_dir + '_shp',
os.path.relpath(p, cp))
ct = '{:d}'.format(int(round(os.path.getmtime(fpath)*1000.)))
of = os.path.join(cp, ct + '.p')
Expand Down

0 comments on commit 5385465

Please sign in to comment.