Skip to content

Commit

Permalink
Add appveyor script (#15)
Browse files Browse the repository at this point in the history
  • Loading branch information
caspervdw committed Nov 1, 2019
1 parent aea91ab commit 06c359e
Show file tree
Hide file tree
Showing 7 changed files with 66 additions and 3 deletions.
4 changes: 3 additions & 1 deletion CHANGES.rst
Original file line number Diff line number Diff line change
Expand Up @@ -5,7 +5,9 @@ Changelog of dask-geomodeling
2.0.5 (unreleased)
------------------

- Nothing changed yet.
- Added RasterFileSource.close_dataset to close the GDAL file handle.

- Run unit tests on Windows.


2.0.4 (2019-11-01)
Expand Down
3 changes: 3 additions & 0 deletions README.rst
Original file line number Diff line number Diff line change
Expand Up @@ -7,6 +7,9 @@ dask-geomodeling
.. image:: https://travis-ci.com/nens/dask-geomodeling.svg?branch=master
:target: https://travis-ci.com/nens/dask-geomodeling

.. image:: https://ci.appveyor.com/api/projects/status/aopxohgl23llkeq8?svg=true
:target: https://ci.appveyor.com/project/reinout/dask-geomodeling

.. image:: https://badge.fury.io/py/dask-geomodeling.svg
:target: https://badge.fury.io/py/dask-geomodeling

Expand Down
42 changes: 42 additions & 0 deletions appveyor.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,42 @@
# AppVeyor CI configuration: run the dask-geomodeling test suite on
# Windows (32- and 64-bit) using the pre-installed Miniconda Python 3.7.
environment:
  matrix:
    # 32-bit Miniconda build
    - PYTHON_PATH: "C:\\Miniconda37"
      PYTHON_VERSION: 3.7
      DEPS: "numpy gdal scipy pytz dask-core toolz geopandas"

    # 64-bit Miniconda build
    - PYTHON_PATH: "C:\\Miniconda37-x64"
      PYTHON_VERSION: 3.7
      DEPS: "numpy gdal scipy pytz dask-core toolz geopandas"

init:
  # Log which interpreter this job uses. Fixed: the original echoed
  # %PYTHON%, which is never defined in this file (the matrix defines
  # PYTHON_PATH), so the echo printed a literal "%PYTHON%".
  - "ECHO %PYTHON_PATH% %PYTHON_VERSION%"

install:
  # If there is a newer build queued for the same PR, cancel this one.
  # The AppVeyor 'rollout builds' option is supposed to serve the same
  # purpose but it is problematic because it tends to cancel builds pushed
  # directly to master instead of just PR builds (or the converse).
  # credits: JuliaLang developers.
  - ps: if ($env:APPVEYOR_PULL_REQUEST_NUMBER -and $env:APPVEYOR_BUILD_NUMBER -ne ((Invoke-RestMethod `
      https://ci.appveyor.com/api/projects/$env:APPVEYOR_ACCOUNT_NAME/$env:APPVEYOR_PROJECT_SLUG/history?recordsNumber=50).builds | `
      Where-Object pullRequestId -eq $env:APPVEYOR_PULL_REQUEST_NUMBER)[0].buildNumber) { `
        throw "There are newer queued builds for this pull request, failing early." }

  # Prepend to the PATH of this build. Fixed: the original used
  # %CONDA_ROOT%\Library\bin, but CONDA_ROOT is never defined in this
  # file, leaving a literal "%CONDA_ROOT%" in PATH; Library\bin lives
  # under the Miniconda root (PYTHON_PATH) and holds the GDAL/geos DLLs.
  - "set PATH=%PYTHON_PATH%;%PYTHON_PATH%\\Scripts;%PYTHON_PATH%\\Library\\bin;%PATH%"
  - conda config --set always_yes true
  - conda update --quiet conda

  # See https://github.com/conda/conda/issues/8836 ("yes, this is insane")
  - activate
  - conda info --all
  - conda config --append channels conda-forge
  - conda create -n testenv --yes python=%PYTHON_VERSION% %DEPS% pytest

  # Do this. Don't "conda activate testenv".
  - activate testenv

# Not a C#/VB project: skip AppVeyor's build phase entirely.
build: false

test_script:
  - pytest
7 changes: 7 additions & 0 deletions dask_geomodeling/raster/sources.py
Original file line number Diff line number Diff line change
Expand Up @@ -298,6 +298,9 @@ class RasterFileSource(RasterBlock):
The global root path can be adapted as follows:
>>> from dask import config
>>> config.set({"geomodeling.root": "/my/data/path"})
Note that this object keeps a file handle open. If you need to close the
file handle, call block.close_dataset (or dereference the whole object).
"""

def __init__(self, url, time_first=0, time_delta=300000):
Expand Down Expand Up @@ -333,6 +336,10 @@ def gdal_dataset(self):
self._gdal_dataset = gdal.Open(path)
return self._gdal_dataset

def close_dataset(self):
    """Drop the cached GDAL dataset handle, closing the underlying file.

    The dataset is lazily opened and cached on ``self._gdal_dataset``;
    releasing the reference lets GDAL close the file handle. Safe to
    call when no dataset has been opened yet.
    """
    if getattr(self, "_gdal_dataset", None) is not None:
        self._gdal_dataset = None

@property
def projection(self):
    """Spatial reference of the raster file.

    NOTE(review): derived from the GDAL dataset's WKT projection via
    ``utils.get_epsg_or_wkt`` — presumably an EPSG code string when one
    can be identified, falling back to WKT; confirm against utils.
    Accessing this property opens the GDAL dataset if not yet cached.
    """
    return utils.get_epsg_or_wkt(self.gdal_dataset.GetProjection())
Expand Down
4 changes: 2 additions & 2 deletions dask_geomodeling/tests/test_core.py
Original file line number Diff line number Diff line change
Expand Up @@ -139,7 +139,7 @@ def test_tokenize_shapely_geometry(self):
def test_tokenize_datetime(self):
"""Compare tokens of different datetimes"""
hashes = set()
for n in np.random.randint(0, 10000000000, self.N):
for n in np.random.randint(0, 2000000000, self.N):
token1 = tokenize(datetime.fromtimestamp(n))
token2 = tokenize(datetime.fromtimestamp(n))

Expand All @@ -153,7 +153,7 @@ def test_tokenize_datetime(self):
def test_tokenize_timedelta(self):
"""Compare tokens of different timedeltas"""
hashes = set()
for n in np.random.randint(0, 10000000000, self.N):
for n in np.random.randint(0, 2000000000, self.N):
token1 = tokenize(timedelta(microseconds=int(n)))
token2 = tokenize(timedelta(microseconds=int(n)))

Expand Down
3 changes: 3 additions & 0 deletions dask_geomodeling/tests/test_raster_sources.py
Original file line number Diff line number Diff line change
Expand Up @@ -239,3 +239,6 @@ def setUp(self):
time_first=datetime(2000, 1, 1),
time_delta=timedelta(days=1),
)

def tearDown(self):
    """Close the source's GDAL file handle after each test.

    NOTE(review): presumably required so that tearDownClass can remove
    the temporary raster file — on Windows an open handle blocks file
    deletion; confirm.
    """
    self.source.close_dataset()  # needed for the tearDownClass
6 changes: 6 additions & 0 deletions dask_geomodeling/tests/test_utils.py
Original file line number Diff line number Diff line change
@@ -1,5 +1,7 @@
from unittest import mock
import unittest
import pytest
import sys

from osgeo import osr
from shapely import geometry
Expand Down Expand Up @@ -84,6 +86,10 @@ def test_get_footprint(self):
)
self.assertTrue(np.equal(output, reference).all())

@pytest.mark.skipif(
sys.platform.startswith("win"),
reason="Path tests are not yet written for windows",
)
def test_safe_file_url(self):
# prepends file:// if necessary
self.assertEqual(utils.safe_file_url("/tmp"), "file:///tmp")
Expand Down

0 comments on commit 06c359e

Please sign in to comment.