diff --git a/.travis.yml b/.travis.yml
index 58354d25..83a287ee 100644
--- a/.travis.yml
+++ b/.travis.yml
@@ -1,5 +1,5 @@
 language: generic
-
+os: linux
 dist: xenial
 
 jobs:
@@ -15,8 +15,7 @@ jobs:
           update: true
       env:
         - CONDA_FN="Miniconda3-latest-MacOSX-x86_64.sh"
-    - os: linux
-      name: "Linux (Python3.6 + pep8)"
+    - name: "Linux (Python3.6 + pep8)"
       dist: xenial
       python: "3.6"
       addons:
@@ -29,7 +28,6 @@
     - os: linux
      name: "Linux (Python3.7 + docs)"
       python: "3.7"
-      dist: xenial
       addons:
         apt:
           packages:
@@ -37,10 +35,8 @@
       env:
         - CONDA_FN="Miniconda3-latest-Linux-x86_64.sh"
         - DOCS=true
-    - os: linux
-      name: "Linux (Python3.8 + notebooks)"
+    - name: "Linux (Python3.8 + notebooks)"
       python: "3.8"
-      dist: xenial
       addons:
         apt:
           packages:
@@ -75,8 +71,8 @@ install:
 
 before_script:
 # # Start WPS service on port 5000 on 0.0.0.0
-# - raven start --daemon --bind-host 0.0.0.0 --port 5000
-  - bash -c "source $HOME/miniconda3/bin/activate raven && make start"
+  # - raven start --daemon --bind-host 0.0.0.0 --port 5000
+  - bash -c "source $HOME/miniconda3/bin/activate raven && make start"
 
 script:
   - bash -c "source $HOME/miniconda3/bin/activate raven && make test"
diff --git a/docs/source/notebooks/gridded_data_subset.ipynb b/docs/source/notebooks/gridded_data_subset.ipynb
index 912e4119..5703627b 100644
--- a/docs/source/notebooks/gridded_data_subset.ipynb
+++ b/docs/source/notebooks/gridded_data_subset.ipynb
@@ -129,7 +129,7 @@
     "    \n",
     "    if not tsfile.exists():\n",
     "        # Path to unified NetCDF ML dataset file on the THREDDS server (OPeNDAP link)\n",
-    "        NRCAN_url='https://pavics.ouranos.ca/twitcher/ows/proxy/thredds/dodsC/birdhouse/1-Datasets/gridded_obs/nrcan_v2.ncml'\n",
+    "        NRCAN_url='https://pavics.ouranos.ca/twitcher/ows/proxy/thredds/dodsC/datasets/gridded_obs/nrcan_v2.ncml'\n",
     "\n",
     "        #Open the dataset file and slice the desired lat/lon (+1°Buffer) and limit to the time simulation duration\n",
     "        ds=xr.open_dataset(NRCAN_url).sel(lat=slice(lat_max+1,lat_min-1), lon=slice(lon_min-1,lon_max+1), time=slice(start, stop))\n",
@@ -160,7 +160,7 @@
     "    tsfile=tmp / 'ERA5_ts.nc'\n",
     "    day = dt.timedelta(days=1)\n",
     "    if not tsfile.exists(): \n",
-    "        ERA5_url='https://pavics.ouranos.ca/twitcher/ows/proxy/thredds/dodsC/birdhouse/1-Datasets/reanalyses/era5.ncml'\n",
+    "        ERA5_url='https://pavics.ouranos.ca/twitcher/ows/proxy/thredds/dodsC/datasets/reanalyses/era5.ncml'\n",
     "        ds=xr.open_dataset(ERA5_url).sel(latitude=slice(lat_max+1,lat_min-1), longitude=slice(lon_min+360-1,lon_max+360+1),time=slice(start - day, stop + day))\n",
     "\n",
     "        # Special treatment for ERA5 in North America: ECMWF stores ERA5 longitude in 0:360 format rather than -180:180. We need to reassign the longitudes here\n",
diff --git a/environment.yml b/environment.yml
index 14b7fdf9..75d0c75a 100644
--- a/environment.yml
+++ b/environment.yml
@@ -25,7 +25,7 @@ dependencies:
 - geojson
 - netcdf4
 - libnetcdf #==4.6.2 # Avoid core dump with 4.7.1. Can be unpinned when 4.7.1 is not the latest anymore.
-- gdal~=2.4
+- gdal==3.0.4
 - fiona
 - pyproj
 - descartes
diff --git a/requirements.txt b/requirements.txt
index bc74160f..1c55ff29 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -19,7 +19,7 @@ spotpy
 statsmodels
 # GIS LIBRARIES
 # pycrs --- Depends on online database requests --> SLOW
-gdal~=2.4
+# gdal~=2.4
 pysheds
 affine
 rtree
@@ -33,10 +33,14 @@ shapely
 unidecode
 geojson
 pandoc
-ipython
 ipyleaflet
 ipython  # needed for the parsing of Jupyter Notebooks
-salem  # needed for figures in some notebooks
+# salem  # needed for figures in some notebooks
 psycopg2  # to use postgres to log pywps requests like in Prod
 # xclim It seems to be installed by pip in src/ and confuse pytest.
 xskillscore
+bokeh
+distributed
+partd
+fsspec
+gdal==3.0.4
diff --git a/requirements_dev.txt b/requirements_dev.txt
index d3d3d627..6b801687 100644
--- a/requirements_dev.txt
+++ b/requirements_dev.txt
@@ -15,3 +15,4 @@ pytest-dependency
 pytest-notebook
 sphinx-autoapi
 urlpath
+black
diff --git a/tests/test_NRCAN_daily.py b/tests/test_NRCAN_daily.py
index ce74020e..aa1d9ea3 100644
--- a/tests/test_NRCAN_daily.py
+++ b/tests/test_NRCAN_daily.py
@@ -13,7 +13,7 @@
 from raven.models import HMETS
 
 
-NRCAN_path = "https://pavics.ouranos.ca/twitcher/ows/proxy/thredds/dodsC/birdhouse/1-Datasets/gridded_obs/nrcan_v2.ncml"
+NRCAN_path = "https://pavics.ouranos.ca/twitcher/ows/proxy/thredds/dodsC/datasets/gridded_obs/nrcan_v2.ncml"
 
 # Temporary path
 filepath = tempfile.mkdtemp() + "/NRCAN_ts.nc"
diff --git a/tests/test_bias_correction.py b/tests/test_bias_correction.py
new file mode 100644
index 00000000..99b2c2b3
--- /dev/null
+++ b/tests/test_bias_correction.py
@@ -0,0 +1,63 @@
+import datetime as dt
+import xarray as xr
+import xclim.sdba as sdba
+
+
+class TestBiasCorrect:
+    def test_bias_correction(self):
+
+        ref_data = ("https://pavics.ouranos.ca/twitcher/ows/proxy/thredds/dodsC/datasets/simulations/cmip5/atmos/"
+                    "day_MPI-ESM-LR_historical.ncml")
+        fut_data = ("https://pavics.ouranos.ca/twitcher/ows/proxy/thredds/dodsC/datasets/simulations/cmip5/atmos/"
+                    "day_MPI-ESM-LR_historical+rcp85.ncml")
+        hist_data = "https://pavics.ouranos.ca/twitcher/ows/proxy/thredds/dodsC/datasets/gridded_obs/nrcan_v2.ncml"
+
+        lat = 54.484
+        lon = -123.36
+
+        # CAREFUL! ERA5 IS NOT ThE SAME LONGITUDE
+        # Also, latitude goes from high to low, so I need to have it go from lat+1 to lat-1 in the slice.
+        # For the NRCan dataset, I cannot have more than about 10 years as I get a "NetCDF: DAP failure" which I think
+        # is related to a timeout.
+        ds = (
+            xr.open_dataset(hist_data)
+            .sel(
+                lat=slice(lat + 1, lat - 1),
+                lon=slice(lon - 1, lon + 1),
+                time=slice(dt.datetime(1991, 1, 1), dt.datetime(2010, 12, 31)),
+            )
+            .mean(dim={"lat", "lon"}, keep_attrs=True)
+        )
+
+        # For lon in 0-360 format, need to add an auto-checker.
+        lon = 260
+        ds2 = (
+            xr.open_dataset(ref_data)
+            .sel(
+                lat=slice(lat - 1, lat + 1),
+                lon=slice(lon - 1, lon + 1),
+                time=slice(dt.datetime(1981, 1, 1), dt.datetime(2010, 12, 31)),
+            )
+            .mean(dim={"lat", "lon"}, keep_attrs=True)
+        )
+        ds3 = (
+            xr.open_dataset(fut_data)
+            .sel(
+                lat=slice(lat - 1, lat + 1),
+                lon=slice(lon - 1, lon + 1),
+                time=slice(dt.datetime(2041, 1, 1), dt.datetime(2070, 12, 31)),
+            )
+            .mean(dim={"lat", "lon"}, keep_attrs=True)
+        )
+
+        # Here data in ds, ds2 and ds3 are NaN!
+
+        group_month_nowindow = sdba.utils.Grouper("time.month")
+        Adj = sdba.DetrendedQuantileMapping(
+            nquantiles=50, kind="+", group=group_month_nowindow
+        )
+        Adj.train(ds["pr"], ds2["pr"])
+        Adj.adjust(ds3["pr"], interp="linear")
+        Adj.ds.af  # adjustment factors.
+
+        print(Adj.ds.af)
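
For reference, the adjustment workflow exercised by the new test can also be run offline, which avoids the NaN and "NetCDF: DAP failure" caveats noted in the test's comments. The sketch below follows the same sdba calls as the test (Grouper, DetrendedQuantileMapping with nquantiles=50, kind="+", monthly grouping); the series names (pr_obs, pr_hist, pr_fut) and the random gamma values are made up for illustration and are not part of this PR, and newer xclim releases construct the object through a DetrendedQuantileMapping.train(...) classmethod rather than the constructor-plus-train pattern used here.

import numpy as np
import pandas as pd
import xarray as xr
import xclim.sdba as sdba

# Synthetic daily precipitation series standing in for the OPeNDAP data above
# (names and distributions are illustrative only).
time = pd.date_range("1991-01-01", "2010-12-31", freq="D")
pr_obs = xr.DataArray(np.random.gamma(2.0, 1.0, time.size), dims="time",
                      coords={"time": time}, attrs={"units": "mm/day"})
pr_hist = xr.DataArray(np.random.gamma(2.0, 1.2, time.size), dims="time",
                       coords={"time": time}, attrs={"units": "mm/day"})
fut_time = pd.date_range("2041-01-01", "2070-12-31", freq="D")
pr_fut = xr.DataArray(np.random.gamma(2.0, 1.3, fut_time.size), dims="time",
                      coords={"time": fut_time}, attrs={"units": "mm/day"})

# Same call sequence as tests/test_bias_correction.py: monthly grouping,
# 50 quantiles, additive correction, linear interpolation between quantiles.
group_month = sdba.utils.Grouper("time.month")
Adj = sdba.DetrendedQuantileMapping(nquantiles=50, kind="+", group=group_month)
Adj.train(pr_obs, pr_hist)                  # observed reference vs. historical run
scen = Adj.adjust(pr_fut, interp="linear")  # bias-adjusted future series
print(Adj.ds.af)                            # per-month, per-quantile adjustment factors

Running on random data only checks that the call sequence works end to end; the adjusted values themselves are meaningless.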