From 808fb7aa1ae7d1cce01540308cef2e1a845b550e Mon Sep 17 00:00:00 2001 From: Craig Arthur Date: Thu, 8 Oct 2020 08:27:35 +1100 Subject: [PATCH 01/11] Update README.rst --- README.rst | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/README.rst b/README.rst index c7586d25..e8bd5830 100644 --- a/README.rst +++ b/README.rst @@ -79,8 +79,8 @@ Status :target: https://landscape.io/github/GeoscienceAustralia/tcrm/develop :alt: Code Health -.. image:: https://zenodo.org/badge/DOI/10.5281/zenodo.3741493.svg - :target: https://doi.org/10.5281/zenodo.3741493 +.. image:: https://zenodo.org/badge/DOI/10.5281/zenodo.4070660.svg + :target: https://doi.org/10.5281/zenodo.4070660 Screenshot ========== From 69f911d2091af61b67a82d0d866a738019328284 Mon Sep 17 00:00:00 2001 From: Craig Arthur Date: Mon, 9 Nov 2020 13:21:36 +1100 Subject: [PATCH 02/11] Update version number --- conf.py | 2 +- setup.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/conf.py b/conf.py index 28910ef6..a0743520 100644 --- a/conf.py +++ b/conf.py @@ -53,7 +53,7 @@ # built documents. # # The short X.Y version. -version = '2.1' +version = '3.1' # The full version, including alpha/beta/rc tags. release = latest_tag.name diff --git a/setup.py b/setup.py index 64bd0a6b..ba3b06e5 100644 --- a/setup.py +++ b/setup.py @@ -7,7 +7,7 @@ setup( name = "TCRM", - version = '3.1.3', + version = '3.1.4', packages=find_packages(), scripts=['tcrm.py', 'tcevent.py'], include_package_data=True, From f6deda20e7242fbd4d1e10fcea3dad3c49cb095e Mon Sep 17 00:00:00 2001 From: Craig Arthur Date: Mon, 9 Nov 2020 14:51:58 +1100 Subject: [PATCH 03/11] Merge commit 'ea8e3b816cf01089a304e121669880ef193c4b23' --- Utilities/config.py | 3 ++- tcevent.py | 7 +++++-- 2 files changed, 7 insertions(+), 3 deletions(-) diff --git a/Utilities/config.py b/Utilities/config.py index 25ea922b..facc37b5 100644 --- a/Utilities/config.py +++ b/Utilities/config.py @@ -137,7 +137,8 @@ def formatList(lst): 'WindfieldInterface_thetamax': float, 'WindfieldInterface_trackfile': str, 'WindfieldInterface_trackpath': str, - 'WindfieldInterface_windfieldtype': str} + 'WindfieldInterface_windfieldtype': str, + 'WindfieldInterface_plotoutput': parseBool} DEFAULTS = """ [Actions] diff --git a/tcevent.py b/tcevent.py index d6c48eba..f48962d3 100755 --- a/tcevent.py +++ b/tcevent.py @@ -36,7 +36,6 @@ from Utilities.version import version from Utilities.progressbar import SimpleProgressBar as ProgressBar from Evaluate import interpolateTracks -from PlotInterface.maps import saveWindfieldMap __version__ = version() @@ -119,6 +118,8 @@ def doWindfieldPlotting(configFile): """ from netCDF4 import Dataset import numpy as np + from PlotInterface.maps import saveWindfieldMap + config = ConfigParser() config.read(configFile) outputPath = config.get('Output', 'Path') @@ -205,7 +206,9 @@ def status(done, total): import impact impact.run_optional(config) - doWindfieldPlotting(configFile) + if config.getboolean('WindfieldInterface', 'PlotOutput'): + doWindfieldPlotting(configFile) + if config.getboolean('Timeseries', 'Extract'): doTimeseriesPlotting(configFile) From 1ead7bd935eae3a042f6f7c870536ae921baed97 Mon Sep 17 00:00:00 2001 From: Craig Arthur Date: Wed, 11 Nov 2020 14:32:34 +1100 Subject: [PATCH 04/11] Read netcdf-format track file from GA SST GA's Scenario selection tool allows users to download a selected track file, but due to technical constraints this is in a different format. This change allows TCRM to read the modified format. 
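The flat layout stores each track field as a top-level netCDF variable, with the time coordinate in a 'Datetime' variable described by 'time_units' and 'calendar' global attributes. A minimal sketch of reading that layout with the netCDF4 API follows (the helper name is illustrative only; the patch below folds the same logic into Utilities.track.ncReadTrackData):

    from netCDF4 import Dataset, num2date

    def readFlatTrackFile(trackfile):
        # Illustrative helper, not part of the patch: mirrors the
        # group-less branch added to ncReadTrackData below.
        ncobj = Dataset(trackfile, mode='r')
        dt = ncobj.variables['Datetime']
        units = ncobj.getncattr('time_units')
        calendar = ncobj.getncattr('calendar')
        times = num2date(dt[:], units, calendar)
        # All remaining track fields are stored as top-level variables:
        data = {name: var[:] for name, var in ncobj.variables.items()
                if name != 'Datetime'}
        ncobj.close()
        return times, data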
--- Utilities/track.py | 18 ++++++++++++++++++ 1 file changed, 18 insertions(+) diff --git a/Utilities/track.py b/Utilities/track.py index 1e6d94e1..2faa35ea 100644 --- a/Utilities/track.py +++ b/Utilities/track.py @@ -199,6 +199,24 @@ def ncReadTrackData(trackfile): raise IOError("Cannot open {0}".format(trackfile)) g = ncobj.groups + if not bool(g): + # We have a track file that stores data in separate variables + log.debug(f"Reading data from a single track file") + dt = ncobj.variables['Datetime'] + units = ncobj.getncattr('time_units') + calendar = ncobj.getncattr('calendar') + dtt = num2date(dt[:], units, calendar) + newtd = np.zeros(len(dtt), dtype=track_dtype) + for f in ncobj.variables.keys(): + if f != 'Datetime' and f in track_dtype.names: + newtd[f] = ncobj.variables[f][:] + newtd['Datetime'] = dtt + track = Track(newtd) + track.trackfile = trackfile + track.trackId = eval(ncobj.trackId) + + return [track] + tracks = [] if 'tracks' in g: tgroup = g['tracks'].groups From 1916233f7dfdecf6a1b5f5b0d89f3eb1d164bd3e Mon Sep 17 00:00:00 2001 From: Craig Arthur Date: Thu, 12 Nov 2020 18:25:52 +1100 Subject: [PATCH 05/11] Fix up a couple of tests that were failing --- PlotInterface/maps.py | 4 ++-- tests/test_system.py | 6 +++++- wind/writer.py | 2 +- 3 files changed, 8 insertions(+), 4 deletions(-) diff --git a/PlotInterface/maps.py b/PlotInterface/maps.py index 3fe9362c..53d4b3e1 100644 --- a/PlotInterface/maps.py +++ b/PlotInterface/maps.py @@ -196,8 +196,8 @@ def addGraticule(self, axes, mapobj): draw_labels=True) gl.xformatter = LONGITUDE_FORMATTER gl.yformatter = LATITUDE_FORMATTER - gl.xlabels_top = False - gl.ylabels_right = False + gl.top_labels = False + gl.right_labels = False def addCoastline(self, mapobj): """ diff --git a/tests/test_system.py b/tests/test_system.py index 157b6223..9c5b5447 100644 --- a/tests/test_system.py +++ b/tests/test_system.py @@ -38,7 +38,7 @@ def setUp(self): self.addCleanup(Utilities.config.reset) self.tmpdir = tempfile.TemporaryDirectory() - self.addCleanup(self.tmpdir.cleanup) + #self.addCleanup(self.tmpdir.cleanup) self.configFile = os.path.join( Utilities.pathLocator.getRootDirectory(), @@ -48,6 +48,7 @@ def setUp(self): config = Utilities.config.ConfigParser() config.read(self.configFile) config['Output']['Path'] = self.tmpdir.name + config['WindfieldInterface']['PlotOutput'] = 'True' @decimate(100) def test_scenario(self): @@ -71,5 +72,8 @@ def test_scenario(self): self.assertGreater(white.sum() / pixels, 0.2) # substantial space self.assertGreater(color.sum() / pixels, 0.05) # significant color + from time import sleep + sleep(1) + if __name__ == '__main__': unittest.main() diff --git a/wind/writer.py b/wind/writer.py index d06b9a36..405d6888 100644 --- a/wind/writer.py +++ b/wind/writer.py @@ -111,7 +111,7 @@ def __call__(self, time, gust, Ux, Uy, P, lon, lat): t = len(self.time) if not t: - self.time.units = "days since " + time.strftime() + self.time.units = "days since " + time.strftime("%Y-%m-%d %H:%M") # convert window extent to slice indices origin = np.rint(self.affine * (lon[0], lat[0])).astype(int) From 41580d9d9a910293f1c23ed57c97809b61e452bd Mon Sep 17 00:00:00 2001 From: Craig Arthur Date: Fri, 16 Apr 2021 15:14:55 +1000 Subject: [PATCH 06/11] Bugfix: getPoci returns np.nan incorrectly --- Utilities/loadData.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Utilities/loadData.py b/Utilities/loadData.py index 1e57137e..94f9124e 100644 --- a/Utilities/loadData.py +++ b/Utilities/loadData.py @@ -710,7 +710,7 @@ 
def getPoci(penv, pcentre, lat, jdays, eps, nvidx = np.where(pcentre == missingValue) poci_model[nvidx] = np.nan - nvidx = np.where(penv <= pcentre) + nvidx = np.where(penv < pcentre) poci_model[nvidx] = np.nan elif penv < pcentre: From 73c2049abee7763037dbc36191a4e9b55869a2d5 Mon Sep 17 00:00:00 2001 From: Craig Arthur Date: Thu, 22 Apr 2021 11:14:19 +1000 Subject: [PATCH 07/11] Update to Github actions Also fix datetime issues in track.ncReadTrackData --- .github/workflows/tcrm-tests.yml | 29 +++++++++++++++++++++++++++++ Evaluate/interpolateTracks.py | 4 ++-- README.rst | 12 ++++++------ Utilities/track.py | 8 ++++++-- 4 files changed, 43 insertions(+), 10 deletions(-) create mode 100644 .github/workflows/tcrm-tests.yml diff --git a/.github/workflows/tcrm-tests.yml b/.github/workflows/tcrm-tests.yml new file mode 100644 index 00000000..1bf1bdbd --- /dev/null +++ b/.github/workflows/tcrm-tests.yml @@ -0,0 +1,29 @@ +# This workflow will install Python dependencies, run tests and lint with a variety of Python versions +# For more information see: https://help.github.com/actions/language-and-framework-guides/using-python-with-github-actions + +name: Unit tests for TCRM + +on: + push: + branches: [ master, develop ] + pull_request: + branches: [ master, develop ] + +jobs: + Hazimp: + name: Test HazImp + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v2 + - name: Set up environment + uses: conda-incubator/setup-miniconda@v2.0.0 + with: + activate-environment: tcrm + environment-file: tcrmenv.yml + python-version: 3.7 + auto-activate-base: false + + - name: Test with nose + shell: bash -l {0} + run: | + python tests/run.py diff --git a/Evaluate/interpolateTracks.py b/Evaluate/interpolateTracks.py index c1b551d6..924fd2b1 100644 --- a/Evaluate/interpolateTracks.py +++ b/Evaluate/interpolateTracks.py @@ -84,7 +84,7 @@ def interpolate(track, delta, interpolation_type=None): else: raise dt_ = 24.0 * np.diff(time_) - dt = np.empty(len(track.data), dtype=float) + dt = np.zeros(len(track.data), dtype=float) dt[1:] = dt_ # Convert all times to a time after initial observation: @@ -93,13 +93,13 @@ def interpolate(track, delta, interpolation_type=None): newtime = np.arange(timestep[0], timestep[-1] + .01, delta) newtime[-1] = timestep[-1] _newtime = (newtime / 24.) + time_[0] - newdates = num2date(_newtime) newdates = np.array([n.replace(tzinfo=None) for n in newdates]) if not hasattr(track, 'Speed'): idx = np.zeros(len(track.data)) idx[0] = 1 + # TODO: Possibly could change `np.mean(dt)` to `dt`? track.WindSpeed = maxWindSpeed(idx, np.mean(dt), track.Longitude, track.Latitude, track.CentralPressure, track.EnvPressure) diff --git a/README.rst b/README.rst index e8bd5830..0640a71b 100644 --- a/README.rst +++ b/README.rst @@ -65,18 +65,18 @@ TCRM requires: Status ====== -.. image:: https://travis-ci.org/GeoscienceAustralia/tcrm.svg?branch=develop - :target: https://travis-ci.org/GeoscienceAustralia/tcrm +.. image:: https://github.com/GeoscienceAustralia/tcrm/actions/workflows/tcrm-tests.yml/badge.svg?branch=master + :target: https://github.com/GeoscienceAustralia/tcrm/actions/workflows/tcrm-tests.yml :alt: Build status -.. image:: https://coveralls.io/repos/GeoscienceAustralia/tcrm/badge.svg?branch=develop - :target: https://coveralls.io/r/GeoscienceAustralia/tcrm?branch=develop +.. image:: https://coveralls.io/repos/GeoscienceAustralia/tcrm/badge.svg?branch=master + :target: https://coveralls.io/r/GeoscienceAustralia/tcrm?branch=master :alt: Test coverage -.. 
image:: https://landscape.io/github/GeoscienceAustralia/tcrm/develop/landscape.svg?style=flat - :target: https://landscape.io/github/GeoscienceAustralia/tcrm/develop +.. image:: https://landscape.io/github/GeoscienceAustralia/tcrm/master/landscape.svg?style=flat + :target: https://landscape.io/github/GeoscienceAustralia/tcrm/master :alt: Code Health .. image:: https://zenodo.org/badge/DOI/10.5281/zenodo.4070660.svg diff --git a/Utilities/track.py b/Utilities/track.py index bdca9e97..1cca7c1d 100644 --- a/Utilities/track.py +++ b/Utilities/track.py @@ -25,6 +25,7 @@ from Utilities.maputils import bearing2theta from netCDF4 import Dataset, date2num, num2date +from cftime import num2pydate try: from exceptions import WindowsError @@ -206,11 +207,13 @@ def ncReadTrackData(trackfile): units = ncobj.getncattr('time_units') calendar = ncobj.getncattr('calendar') dtt = num2date(dt[:], units, calendar) + # Convert to true python datetimes + dtconversion = [datetime.strptime(d.strftime(), "%Y-%m-%d %H:%M:%S") for d in dtt] newtd = np.zeros(len(dtt), dtype=track_dtype) for f in ncobj.variables.keys(): if f != 'Datetime' and f in track_dtype.names: newtd[f] = ncobj.variables[f][:] - newtd['Datetime'] = dtt + newtd['Datetime'] = dtconversion track = Track(newtd) track.trackfile = trackfile track.trackId = eval(ncobj.trackId) @@ -237,7 +240,8 @@ def ncReadTrackData(trackfile): for f in track_data.dtype.names: if f != 'Datetime' and f in track_dtype.names: newtd[f] = track_data[f] - newtd['Datetime'] = dt + dtconversion = [datetime.strptime(d.strftime(), "%Y-%m-%d %H:%M:%S") for d in dt] + newtd['Datetime'] = dtconversion track = Track(newtd) track.trackfile = trackfile From 6598a5907685f0b5fdfb9e8bc115ae3d285fd73f Mon Sep 17 00:00:00 2001 From: Craig Arthur Date: Wed, 3 Nov 2021 11:50:06 +1100 Subject: [PATCH 08/11] Squashed commit of the following: * Nhirs 148 fix thread (#119) * Add script to run through all permutations of wind parameters * Add readthedocs config file, update pylintrc * Move queries to separate file * Move definition statements to separate file * Update tcrm-tests.yml to include Python 3.9 * Remove travis CI tests * Update DOI badge * Change build status badge to github actions * Read netcdf-format track file from GA SST: GA's Scenario selection tool allows users to download a selected track file, but due to technical constraints this is in a different format. This change allows TCRM to read the modified format. * Bugfix: getPoci returns np.nan incorrectly * track.ncReadTrackData returns true datetime objects: track.ncReadTrackData previously returned cftime.DatetimeGregorian objects, which caused newer versions of matplotlib.dates.num2date to fail. This is because we write the tracks with units of 'hours since 1900-01-01 00:00', but matplotlib.dates uses 1970-01-01 as the epoch, and works in units of days (with no way to specify units in the num2date function). 
* getPoci() returns np.nan incorrectly --- .github/workflows/tcrm-pylint.yml | 34 +++ .github/workflows/tcrm-tests.yml | 9 +- .readthedocs.yaml | 22 ++ .travis.yml | 30 -- Evaluate/windFieldValidation.py | 276 ++++++++++++++++++ PlotInterface/AutoPlotHazard.py | 3 +- ProcessMultipliers/processMultipliers.py | 85 +++--- README.rst | 4 +- Utilities/loadData.py | 3 +- convergenceTest.py | 15 +- database/__init__.py | 355 +++-------------------- database/definitions.py | 74 +++++ database/queries.py | 250 ++++++++++++++++ input/stationlist.dbf | Bin 14908389 -> 14911615 bytes input/stationlist.shp | Bin 388404 -> 388488 bytes input/stationlist.shx | Bin 111044 -> 111068 bytes postinstall.sh | 46 --- preinstall.sh | 24 -- pylintrc | 2 +- wind/__init__.py | 8 +- 20 files changed, 758 insertions(+), 482 deletions(-) create mode 100644 .github/workflows/tcrm-pylint.yml create mode 100644 .readthedocs.yaml delete mode 100644 .travis.yml create mode 100644 Evaluate/windFieldValidation.py create mode 100644 database/definitions.py create mode 100644 database/queries.py delete mode 100644 postinstall.sh delete mode 100644 preinstall.sh diff --git a/.github/workflows/tcrm-pylint.yml b/.github/workflows/tcrm-pylint.yml new file mode 100644 index 00000000..d4f1bb93 --- /dev/null +++ b/.github/workflows/tcrm-pylint.yml @@ -0,0 +1,34 @@ +name: Pylint tests for TCRM + +on: + push: + branches: [ master, develop ] + +jobs: + build: + name: Pylint TCRM + runs-on: ubuntu-latest + + steps: + - uses: actions/checkout@v2 + - name: Set up Python env + uses: conda-incubator/setup-miniconda@v2.0.0 + with: + activate-environment: tcrm + environment-file: tcrmenv.yml + python-version: 3.7 + auto-activate-base: false + + - name: Install dependencies + run: | + python -m pip install --upgrade pip + pip install pylint + - name: Analysing the code with pylint + run: | + pylint --rcfile pylintrc --fail-under=7 `find -regextype egrep -regex '(.*.py)$'` | + tee pylint.txt + - name: Upload pylint.txt as artifact + uses: actions/upload-artifact@v2 + with: + name: pylint report + path: pylint.txt diff --git a/.github/workflows/tcrm-tests.yml b/.github/workflows/tcrm-tests.yml index 1bf1bdbd..3e68e830 100644 --- a/.github/workflows/tcrm-tests.yml +++ b/.github/workflows/tcrm-tests.yml @@ -10,9 +10,12 @@ on: branches: [ master, develop ] jobs: - Hazimp: - name: Test HazImp + TCRM: + name: Test TCRM runs-on: ubuntu-latest + strategy: + matrix: + python-version: [3.7, 3.8, 3.9] steps: - uses: actions/checkout@v2 - name: Set up environment @@ -20,7 +23,7 @@ jobs: with: activate-environment: tcrm environment-file: tcrmenv.yml - python-version: 3.7 + python-version: ${{ matrix.python-version }} auto-activate-base: false - name: Test with nose diff --git a/.readthedocs.yaml b/.readthedocs.yaml new file mode 100644 index 00000000..c0c9b0d5 --- /dev/null +++ b/.readthedocs.yaml @@ -0,0 +1,22 @@ +# .readthedocs.yaml +# Read the Docs configuration file +# See https://docs.readthedocs.io/en/stable/config-file/v2.html for details + +# Required +version: 2 + +# Build documentation in the docs/ directory with Sphinx +sphinx: + builder: html + configuration: conf.py + +# Optionally build your docs in additional formats such as PDF +formats: + - pdf + +# Optionally set the version of Python and requirements required to build your docs +python: + version: 3.7 + +conda: + environment: tcrmenv.yml \ No newline at end of file diff --git a/.travis.yml b/.travis.yml deleted file mode 100644 index aef70720..00000000 --- a/.travis.yml +++ /dev/null @@ 
-1,30 +0,0 @@ -language: shell - -env: - - PYTHON_VERSION=3.6 - - PYTHON_VERSION=3.7 - - PYTHON_VERSION=3.8 - -os: - - linux - - windows - -before_install: - - source ./preinstall.sh - -install: - - source ./postinstall.sh - -branches: - except: - - config - - notebooks - -script: - - python installer/setup.py build_ext -i - - nosetests -v --with-coverage --cover-package=. - -after_success: coveralls -notifications: - slack: - secure: Ckmwy59ytS1GPRZ5Tmvzad6+W9AzvfjNJAa4orgdKS/WktoK4b9W2rbTHxi8V3hBLIDUCso8vIQi3rVXpWY3cFMvb/uRbXO4GiIW1iua3CKjxd+dEw4E6/8DEknS1qdGJRDhN9/3ucZNvSGHY3EQQDfxb/R+OGd2jT6+jed8pss= diff --git a/Evaluate/windFieldValidation.py b/Evaluate/windFieldValidation.py new file mode 100644 index 00000000..4553b6eb --- /dev/null +++ b/Evaluate/windFieldValidation.py @@ -0,0 +1,276 @@ +import itertools +#import matplotlib.pyplot as plt +#from matplotlib import cm as cmap +import numpy as np + +import xarray as xr +#import seaborn as sns + +from wind import windmodels +from Utilities import metutils +from Utilities.maputils import bearing2theta, makeGrid, meshLatLon +from Utilities.parallel import attemptParallel + + +#sns.set_style('ticks', {'image.cmap':'coolwarm'}) +#sns.set_context('poster') +#palette = [(1, 1, 1), (0.000, 0.627, 0.235), (0.412, 0.627, 0.235), (0.663, 0.780, 0.282), +# (0.957, 0.812, 0.000), (0.925, 0.643, 0.016), (0.835, 0.314, 0.118), +# (0.780, 0.086, 0.118)] +#cmap = sns.blend_palette(palette, as_cmap=True) + +def polarGridAroundEye(lon, lat, margin=2, resolution=0.02): + R, theta = makeGrid(lon, lat, margin, resolution) + return R, theta + +def meshGrid(lon, lat, margin=2, resolution=0.02): + xgrid, ygrid = meshLatLon(lon, lat, margin, resolution) + return xgrid, ygrid + +def calculateWindField(lon, lat, pEnv, pCentre, rMax, vFm, thetaFm, beta, + profileType='powell', windFieldType='kepert'): + + pCentre = metutils.convert(pCentre, 'hPa', 'Pa') + pEnv = metutils.convert(pEnv, 'hPa', 'Pa') + vFm = metutils.convert(vFm, 'kmh', 'mps') + thetaFm = bearing2theta(np.pi * thetaFm / 180.) + thetaMax = 70. + rmax = metutils.convert(rMax, 'km', 'm') + cls = windmodels.profile(profileType) + if profileType=="holland": + profile = cls(lat, lon, pEnv, pCentre, rmax, beta) + else: + profile = cls(lat, lon, pEnv, pCentre, rmax) + R, theta = polarGridAroundEye(lon, lat, 5.) + gradV = profile.velocity(R*1000) + cls = windmodels.field(windFieldType) + windfield = cls(profile) + Ux, Vy = windfield.field(R*1000, theta, vFm, thetaFm, thetaMax) + + surfV = np.sqrt(Ux*Ux+Vy*Vy)*1.268 # Gust conversion factor + return gradV, surfV + +""" +lat = np.arange(-30, -4, 2, dtype=float) +pc = np.arange(900, 991, 5, dtype=float) +pe = np.arange(995, 1016, dtype=float) +rm = np.arange(10, 91, 5, dtype=float) +vfm = np.arange(0, 51, 5, dtype=float) +gwind = np.zeros((len(lat), len(pc), len(pe), len(rm), len(vfm))) +swind = np.zeros((len(lat), len(pc), len(pe), len(rm), len(vfm))) +it = np.nditer(gwind, flags=['multi_index']) +nn = gwind.size +print(nn) + +lon = 120. 
+thetaFm = 70 +beta = 1.6 +profileType = "powell" +blmodel = "kepert" +i = 0 + + + +for x in it: + il, ic, ip, ir, iv = it.multi_index + gradV, surfV = calculateWindField(lon, lat[il], pe[ip], pc[ic], + rm[ir], vfm[iv], thetaFm, beta, + profileType=profileType, + windFieldType=blmodel) + gwind[it.multi_index] = np.max(gradV) + swind[it.multi_index] = np.max(surfV) + i += 1 + print(f"{100*i/nn:0.4f} %") + +coords = [ + ("latitude", lat, dict(long_name="Latitude", + units="degrees_south")), + ("pcentre", pc, dict(long_name="Central pressure", + units="hPa")), + ("penv", pe, dict(long_name="Environmental pressure", + units="hPa")), + ("rmax", rm, dict(long_name="Radius to maximum winds", + units="km")), + ("vfm", vfm, dict(long_name="Forward speed", + units="km/h")) +] + +dims = ["latitude", 'pcentre', 'penv', 'rmax', 'vfm'] +gattrs = { + "long_name": "Gradient level wind speed", + "profile": profileType, + "blmodel": blmodel, + "description": "maximum gradient level wind speed", + "units": "m s-1", + } +sattrs = { + "long_name": "Surface wind speed", + "profile": profileType, + "blmodel": blmodel, + "description": "maximum 0.2-s wind gust", + "units": "m s-1", + } + + +gda = xr.DataArray(gwind, dims=dims, coords=coords, attrs=gattrs) +sda = xr.DataArray(swind, dims=dims, coords=coords, attrs=sattrs) +ds = xr.Dataset() +ds['gradwind'] = gda +ds['surfwind'] = sda +ds.to_netcdf("output.nc") +""" + +def balanced(iterable): + """ + Balance an iterator across processors. + + This partitions the work evenly across processors. However, it + requires the iterator to have been generated on all processors + before hand. This is only some magical slicing of the iterator, + i.e., a poor man version of scattering. + """ + P, p = MPI.COMM_WORLD.size, MPI.COMM_WORLD.rank + return itertools.islice(iterable, p, None, P) + +def run(): + lat = np.arange(-30, -4, 2, dtype=float) + pc = np.arange(900, 991, 5, dtype=float) + pe = np.arange(995, 1016, dtype=float) + rm = np.arange(10, 91, 5, dtype=float) + vfm = np.arange(0, 51, 5, dtype=float) + gwind = np.zeros((len(lat), len(pc), len(pe), len(rm), len(vfm))) + swind = np.zeros((len(lat), len(pc), len(pe), len(rm), len(vfm))) + it = np.nditer(gwind, flags=['multi_index']) + nn = gwind.size + #print(nn) + + lon = 120. 
+ thetaFm = 70 + beta = 1.6 + profileType = "powell" + blmodel = "kepert" + i = 0 + + # Attempt to start the track generator in parallel + global MPI + MPI = attemptParallel() + comm = MPI.COMM_WORLD + + status = MPI.Status() + worktag = 0 + resulttag = 1 + idx = [it.multi_index for x in it] + + if (comm.rank == 0) and (comm.size > 1): + w = 0 + p = comm.size -1 + for d in range(1, comm.size): + print(w) + if w < len(idx): + comm.send(idx[w], dest=d, tag=worktag) + w += 1 + else: + comm.send(None, dest=d, tag=worktag) + p = w + + terminated = 0 + + while terminated < p: + try: + result = comm.recv(source=MPI.ANY_SOURCE, tag=MPI.ANY_TAG, status=status) + except Exception: + pass + + d = status.source + if result: + gV, sV, workidx = result + gwind[workidx] = gV + swind[workidx] = sV + #gwind[idx[w]], swind[idx[w]] = result + + if w < len(idx): + comm.send(idx[w], dest=d, tag=worktag) + w += 1 + else: + comm.send(None, dest=d, tag=worktag) + terminated += 1 + + elif (comm.rank != 0) and (comm.size > 1): + while True: + workidx = comm.recv(source=0, tag=worktag, status=status) + if workidx is None: + break + il, ic, ip, ir, iv = workidx + print(f"Processing {workidx}") + gradV, surfV = calculateWindField(lon, lat[il], pe[ip], pc[ic], + rm[ir], vfm[iv], thetaFm, beta, + profileType=profileType, + windFieldType=blmodel) + results = (np.max(np.abs(gradV)), np.max(surfV), workidx) + comm.send(results, dest=0, tag=resulttag) + + elif (comm.rank == 0) and (comm.size == 1): + for x in idx: + il, ic, ip, ir, iv = x + print(lat[il], pc[ic], pe[ip], rm[ir], vfm[iv]) + gradV, surfV = calculateWindField(lon, lat[il], pe[ip], pc[ic], + rm[ir], vfm[iv], thetaFm, beta, + profileType=profileType, + windFieldType=blmodel) + gwind[x] = np.max(np.abs(gradV)) + swind[x] = np.max(surfV) + + comm.barrier() + + coords = [ + ("latitude", lat, dict(long_name="Latitude", + units="degrees_south")), + ("pcentre", pc, dict(long_name="Central pressure", + units="hPa")), + ("penv", pe, dict(long_name="Environmental pressure", + units="hPa")), + ("rmax", rm, dict(long_name="Radius to maximum winds", + units="km")), + ("vfm", vfm, dict(long_name="Forward speed", + units="km/h")) + ] + + dims = ["latitude", 'pcentre', 'penv', 'rmax', 'vfm'] + gattrs = { + "long_name": "Gradient level wind speed", + "profile": profileType, + "blmodel": blmodel, + "description": "maximum gradient level wind speed", + "units": "m s-1", + } + sattrs = { + "long_name": "Surface wind speed", + "profile": profileType, + "blmodel": blmodel, + "description": "maximum 0.2-s wind gust", + "units": "m s-1", + } + + if comm.rank == 0: + gda = xr.DataArray(gwind, dims=dims, coords=coords, attrs=gattrs) + sda = xr.DataArray(swind, dims=dims, coords=coords, attrs=sattrs) + ds = xr.Dataset() + ds['gradwind'] = gda + ds['surfwind'] = sda + ds.to_netcdf("output.nc") + + MPI.Finalize() + +if __name__ == '__main__': + print("Starting") + global MPI, comm + print("Initialiszing MPI") + MPI = attemptParallel() + #import atexit + #atexit.register(MPI.Finalize) + comm = MPI.COMM_WORLD + + print("Executing run()") + run() + + #MPI.Finalize() diff --git a/PlotInterface/AutoPlotHazard.py b/PlotInterface/AutoPlotHazard.py index 0d2eb505..e16bb170 100644 --- a/PlotInterface/AutoPlotHazard.py +++ b/PlotInterface/AutoPlotHazard.py @@ -40,7 +40,6 @@ from PlotInterface.curves import saveHazardCurve import sqlite3 -import unicodedata log = logging.getLogger(__name__) log.addHandler(logging.NullHandler()) @@ -225,7 +224,7 @@ def plotHazardCurves(self, inputFile, plotPath): 
log.debug("Saving hazard curve for %s to %s"%(name, filename)) wspd = ncobj.variables['wspd'][:, j, i] - recs = database.locationRecords(self.db, pID) + recs = database.queries.locationRecords(self.db, pID) data = np.zeros(int(self.numsimulations * 365.25)) if len(recs) > 0: data[-len(recs):] = recs['wspd'] diff --git a/ProcessMultipliers/processMultipliers.py b/ProcessMultipliers/processMultipliers.py index be86bc6d..0b187c36 100755 --- a/ProcessMultipliers/processMultipliers.py +++ b/ProcessMultipliers/processMultipliers.py @@ -53,35 +53,34 @@ """ -from shutil import copyfile, rmtree import glob +import logging as log +import math import os -from os.path import join as pjoin, dirname, realpath, isdir, splitext +import queue +import tempfile +import threading import time -import logging as log -import argparse import traceback +from concurrent import futures from functools import wraps, reduce +from os.path import join as pjoin, dirname, realpath, isdir, splitext +from shutil import copyfile -from Utilities.files import flStartLog -from Utilities.config import ConfigParser -from Utilities import pathLocator -from Utilities.AsyncRun import AsyncRun - +import argparse +import boto3 import numpy as np import numpy.ma as ma - +from botocore.exceptions import ClientError +from netCDF4 import Dataset from osgeo import osr, gdal, gdalconst from osgeo.gdal_array import BandReadAsArray, CopyDatasetInfo, BandWriteArray -from netCDF4 import Dataset +from Utilities import pathLocator +from Utilities.AsyncRun import AsyncRun +from Utilities.config import ConfigParser +from Utilities.files import flStartLog -import boto3 -from botocore.exceptions import ClientError -import tempfile -import math -import threading -from concurrent import futures threadLock_gust = threading.Lock() threadLock_bear = threading.Lock() threadLock_m4 = threading.Lock() @@ -955,7 +954,7 @@ def processMultV2(wspd, uu, vv, lon, lat, working_dir, dirns, log.debug('Create rasters from the netcdf gust file variables') wind_raster_file = pjoin(working_dir, 'region_wind.tif') wind_raster = createRaster(np.flipud(wspd), lon, lat, delta, delta, - filename = wind_raster_file) + filename=wind_raster_file) bear_raster = createRaster(np.flipud(bearing), lon, lat, delta, delta) uu_raster = createRaster(np.flipud(uu), lon, lat, delta, delta) vv_raster = createRaster(np.flipud(vv), lon, lat, delta, delta) @@ -975,19 +974,15 @@ def processMultV2(wspd, uu, vv, lon, lat, working_dir, dirns, future_requests = [] with futures.ThreadPoolExecutor(max_workers=max_working_threads) as e: - m4_max_file_obj=gdal.Open(m4_max_file, gdal.GA_ReadOnly) - thread_wind = e.submit(reprojectDataset, wind_raster, m4_max_file_obj, wind_prj_file, - warp_memory_limit=warp_memory_limit) - thread_bear = e.submit(reprojectDataset, bear_raster, m4_max_file_obj, bear_prj_file, - warp_memory_limit=warp_memory_limit, - resampling_method=gdalconst.GRA_NearestNeighbour) - futures.wait([thread_bear]) - thread_bear.result() # Called to obtain exception information if any + m4_max_file_obj = gdal.Open(m4_max_file, gdal.GA_ReadOnly) + reprojectDataset(wind_raster, m4_max_file_obj, wind_prj_file, + warp_memory_limit=warp_memory_limit) + reprojectDataset(bear_raster, m4_max_file_obj, bear_prj_file, + warp_memory_limit=warp_memory_limit, + resampling_method=gdalconst.GRA_NearestNeighbour) future_requests.append(e.submit(reprojectDataset, uu_raster, m4_max_file_obj, uu_prj_file, warp_memory_limit=warp_memory_limit, resampling_method=gdalconst.GRA_NearestNeighbour)) - 
futures.wait([thread_wind]) # Writing wind is slow as it has to write region_wind.tif as well - thread_wind.result() # Called to obtain exception information if any future_requests.append(e.submit(reprojectDataset, vv_raster, m4_max_file_obj, vv_prj_file, warp_memory_limit=warp_memory_limit, resampling_method=gdalconst.GRA_NearestNeighbour)) @@ -1008,11 +1003,12 @@ def processMultV2(wspd, uu, vv, lon, lat, working_dir, dirns, # multipliers drv = gdal.GetDriverByName("GTiff") dst_ds = drv.Create(output_file, cols, rows, 1, - gdal.GDT_Float32, ['SPARSE_OK=TRUE']) + gdal.GDT_Float32, ['BIGTIFF=YES', 'SPARSE_OK=TRUE']) dst_ds.SetGeoTransform(wind_geot) dst_ds.SetProjection(wind_proj) dst_band = dst_ds.GetRasterBand(1) dst_band.SetNoDataValue(-9999) + print('processMultV2', dst_ds.GetProjection()) log.info("Reading bands") source_dir_bands = [] @@ -1033,28 +1029,33 @@ def processMultV2(wspd, uu, vv, lon, lat, working_dir, dirns, total_segments = int(math.ceil(1.0 * cols / processing_segment_size) * math.ceil(1.0 * rows / processing_segment_size)) segment_count = 0 - segments = [] + segment_queue = queue.Queue(total_segments) for y_offset in range(0, rows, processing_segment_size): height = rows - y_offset if y_offset + processing_segment_size > rows else processing_segment_size for x_offset in range(0, cols, processing_segment_size): segment_count = segment_count + 1 width = cols - x_offset if x_offset + processing_segment_size > cols else processing_segment_size - segments.append([x_offset, y_offset, width, height, segment_count, total_segments]) + segment_queue.put([x_offset, y_offset, width, height, segment_count, total_segments]) log.info("Lunching {0} segmented task in {1} worker threads".format(total_segments, max_working_threads)) - for seg in segments: - future_requests.append(e.submit(processMultiplierSegment, seg, source_dir_bands, wind_prj, bear_prj, dst_band)) + for _ in range(max_working_threads): + future_requests.append(e.submit(call_process_multiplier_segment, segment_queue, source_dir_bands, wind_prj, bear_prj, dst_band)) + futures.wait(future_requests, return_when='FIRST_EXCEPTION') for task in future_requests: - task.result() # Called to obtain exception information if any + task.result() # Called to obtain exception information if any + dst_ds.FlushCache() + del dst_ds - del dst_ds - print("") log.info("Completed") - return output_file +def call_process_multiplier_segment(segment_queue, source_dir_band, wind_prj, bear_prj, dst_band): + while not segment_queue.empty(): + processMultiplierSegment(segment_queue.get(), source_dir_band, wind_prj, bear_prj, dst_band) + dst_band.FlushCache() + def processMultiplierSegment(segment, source_dir_band, wind_prj, bear_prj, dst_band): """ Calculates local wind multiplier data by image segments @@ -1081,8 +1082,6 @@ def processMultiplierSegment(segment, source_dir_band, wind_prj, bear_prj, dst_b 8: {'dir': 'n', 'min': 337.5, 'max': 360.} } [x_offset, y_offset, width, height, segment_id, total_segments] = segment - log.debug("Processing segment {0}/{1}: {2} {3} {4} {5}" - .format(segment_id, total_segments, x_offset, y_offset, width, height)) with threadLock_gust: wind_data = wind_prj.ReadAsArray(x_offset, y_offset, width, height) with threadLock_bear: @@ -1096,11 +1095,9 @@ def processMultiplierSegment(segment, source_dir_band, wind_prj, bear_prj, dst_b local[idx] = wind_data[idx] * m4[idx] with threadLock_out: dst_band.WriteArray(local, x_offset, y_offset) - print('\rProgress: {0:.2f}'.format((segment_id * 100) / total_segments), "%", 
end="") - if segment_id % int(math.ceil(total_segments / 20)) == 0: - if log.getLogger(__name__).getEffectiveLevel() == log.DEBUG: - print("") - log.debug('Progress: {0} %'.format(int((segment_id * 100) / total_segments))) + if segment_id % int(math.ceil(total_segments / 100.0)) == 0: + dst_band.FlushCache() + log.info('Progress: {0:.2f} %'.format((segment_id * 100.0) / total_segments)) class run(): diff --git a/README.rst b/README.rst index 0640a71b..392b7acb 100644 --- a/README.rst +++ b/README.rst @@ -79,8 +79,8 @@ Status :target: https://landscape.io/github/GeoscienceAustralia/tcrm/master :alt: Code Health -.. image:: https://zenodo.org/badge/DOI/10.5281/zenodo.4070660.svg - :target: https://doi.org/10.5281/zenodo.4070660 +.. image:: https://zenodo.org/badge/10637300.svg + :target: https://zenodo.org/badge/latestdoi/10637300 Screenshot ========== diff --git a/Utilities/loadData.py b/Utilities/loadData.py index 94f9124e..a2eb5d79 100644 --- a/Utilities/loadData.py +++ b/Utilities/loadData.py @@ -194,7 +194,7 @@ def getSpeedBearing(index, lon, lat, deltatime, ieast=1, speed = dist / deltatime # Delete speeds less than 0, greated than 200, # or where indicator == 1. - np.putmask(speed, (speed < 0), missingValue) + np.putmask(speed, (speed < 0), missingValue) np.putmask(speed, (speed > 200), missingValue) np.putmask(speed, index, missingValue) np.putmask(speed, np.isnan(speed), missingValue) @@ -360,7 +360,6 @@ def getInitialPositions(data): except ValueError: LOG.error("'num' field cannot be converted to an integer") - raise KeyError(('Insufficient input file columns have been specified' 'Check the input file has enough fields to determine' 'TC starting positions')) diff --git a/convergenceTest.py b/convergenceTest.py index 30d7d87d..1b0d86bf 100644 --- a/convergenceTest.py +++ b/convergenceTest.py @@ -54,8 +54,6 @@ # Load the configuration file from the TCHA18, then open the database # and get teh list of available locations. -# In[2]: - configFile = "/home/547/cxa547/tcrmconfig/tcrm2.1.ini" config = ConfigParser() config.read(configFile) @@ -69,7 +67,6 @@ # The following step performs the calculations. First a helper # function to add nicely formatted grid lines on a logarithmic axis. -# # The second function (`plotConvergenceTest`) loads the data from the # database, then splits into two separate collections (called `d1` and # `d2`). 
For each of these, we then calculate empirical ARI values and @@ -88,7 +85,7 @@ def addARIGrid(axes): axes.autoscale(True, axis='x', tight=True) axes.grid(True, which='major', linestyle='-') axes.grid(True, which='minor', linestyle='--', linewidth=0.5) - + def addAEPGrid(axes): """ Add a logarithmic graticuyle to the subplot axes @@ -99,7 +96,7 @@ def addAEPGrid(axes): axes.autoscale(True, axis='y', tight=True) axes.grid(True, which='major', linestyle='-') axes.grid(True, which='minor', linestyle='--', linewidth=0.5) - + def calculateARI(data, years): emprp = empReturnPeriod(np.sort(data)) return np.sort(data)[-years:], emprp[-years:] @@ -117,7 +114,7 @@ def plotConvergenceTest(locName): locLon = locations['locLon'][locations['locId']==locId][0] locLat = locations['locLat'][locations['locId']==locId][0] - records = database.locationRecords(db, str(locId)) + records = database.queries.locationRecords(db, str(locId)) recs = records['wspd'][records['wspd'] > 0] data = np.zeros(int(NumSimulations*365.25)) data[-len(recs):] = recs @@ -133,7 +130,7 @@ def plotConvergenceTest(locName): fdelta = delta/mn fig, ax1 = plt.subplots(1, 1, figsize=figsize) - + ax1.fill_between(rr[0,:], dd[1,:], dd[0,:], alpha=0.5, label="95th percentile") ax1.plot(emprp[-10000:], data[-10000:], color='k', label="Mean ARI") ax1.set_xscale('log') @@ -213,7 +210,7 @@ def plotConvergence(ax, locName): locLon = locations['locLon'][locations['locId']==locId][0] locLat = locations['locLat'][locations['locId']==locId][0] - records = database.locationRecords(db, str(locId)) + records = database.queries.locationRecords(db, str(locId)) recs = records['wspd'][records['wspd'] > 0] data = np.zeros(int(NumSimulations*365.25)) data[-len(recs):] = recs @@ -246,5 +243,5 @@ def plotConvergence(ax, locName): axlist[7].set_xlabel('Average recurrence interval (years)') fig.tight_layout() -plt.savefig(os.path.join(plotPath, "ARI_convergence.png"), +plt.savefig(os.path.join(plotPath, "ARI_convergence.png"), bbox_inches='tight') diff --git a/database/__init__.py b/database/__init__.py index ca703441..c4eb9411 100644 --- a/database/__init__.py +++ b/database/__init__.py @@ -38,14 +38,14 @@ import logging import sqlite3 from sqlite3 import PARSE_DECLTYPES, PARSE_COLNAMES, IntegrityError -from functools import wraps +from functools import wraps, reduce import time from datetime import datetime import unicodedata import re +import atexit from shapely.geometry import Point -logging.getLogger('shapely').setLevel(logging.WARNING) from netCDF4 import Dataset import numpy as np @@ -54,12 +54,17 @@ from Utilities.track import loadTracksFromFiles from Utilities.parallel import attemptParallel, disableOnWorkers from Utilities.process import pAlreadyProcessed, pGetProcessedFiles -from functools import reduce -sqlite3.register_adapter(np.int64, lambda val: int(val)) -sqlite3.register_adapter(np.int32, lambda val: int(val)) +from .definitions import (TBLLOCATIONDEF, TBLEVENTSDEF, TBLWINDSPEEDDEF, + TBLHAZARDDEF, TBLTRACKSDEF, INSLOCATIONS, + INSEVENTS, INSWINDSPEED, INSHAZARD, INSTRACK, + SELECTLOCATIONS) + +sqlite3.register_adapter(np.int64, int) +sqlite3.register_adapter(np.int32, int) log = logging.getLogger(__name__) log.addHandler(logging.NullHandler()) +logging.getLogger('shapely').setLevel(logging.WARNING) def fromrecords(records, names): """ Convert records to array, even if no data """ @@ -90,70 +95,6 @@ def wrap(*args, **kwargs): # pylint: disable=R0914,R0902 -# Table definition statements -# Stations - we assume a geographic coordinate system: 
-TBLLOCATIONDEF = ("CREATE TABLE IF NOT EXISTS tblLocations " - "(locId integer PRIMARY KEY, locCode text, " - "locName text, locType text, locLon real, " - "locLat real, locElev real, locCountry text, " - "locSource text, Comments text, " - "dtCreated timestamp)") - -# Events: -TBLEVENTSDEF = ("CREATE TABLE IF NOT EXISTS tblEvents " - "(eventNumber integer PRIMARY KEY, eventId text, " - "eventFile text, eventTrackFile text, " - "eventMaxWind real, eventMinPressure real, " - "dtTrackFile timestamp, dtWindfieldFile timestamp, " - "tcrmVersion text, Comments text, dtCreated timestamp)") - -#Station wind speed from events: -TBLWINDSPEEDDEF = ("CREATE TABLE IF NOT EXISTS tblWindSpeed " - "(locId integer, eventId text, wspd real, umax real, " - "vmax real, pmin real, Comments text, " - "dtCreated timestamp)") - -# Station hazard levels: -TBLHAZARDDEF = ("CREATE TABLE IF NOT EXISTS tblHazard " - "(locId integer, returnPeriod real, wspd real, " - " wspdUpper real, wspdLower real, loc real, " - "scale real, shape real, tcrmVersion text, " - "dtHazardFile timestamp, Comments text, " - "dtCreated timestamp)") - -# Proximity of tracks to stations: -TBLTRACKSDEF = ("CREATE TABLE IF NOT EXISTS tblTracks " - "(locId integer, eventId text, distClosest real, " - "prsClosest real, dtClosest timestamp, Comments text, " - "dtCreated timestamp)") - -# Insert statements: -# Insert locations: -INSLOCATIONS = ("INSERT OR REPLACE INTO tblLocations " - "VALUES (?,?,?,?,?,?,?,?,?,?,?)") - -# Insert event record: -INSEVENTS = "INSERT INTO tblEvents VALUES (?,?,?,?,?,?,?,?,?,?,?)" - -# Insert wind speed record: -INSWINDSPEED = ("INSERT INTO tblWindSpeed " - "VALUES (?,?,?,?,?,?,?,?)") - -# Insert hazard record: -INSHAZARD = "INSERT INTO tblHazard VALUES (?,?,?,?,?,?,?,?,?,?,?,?)" - -# Insert track record: -INSTRACK = "INSERT INTO tblTracks VALUES (?,?,?,?,?,?,?)" - -# Select statements; -# Select locations within domain: -SELECTLOCATIONS = ("SELECT * FROM tblLocations WHERE " - "locLon >= ? and locLon <= ? and " - "locLat >= ? and locLat <= ?") - -# Select locId, locLon & locLat from the subset of locations: -SELECTLOCLONLAT = "SELECT locId, locLon, locLat FROM tblLocations " - def windfieldAttributes(ncobj): """ Extract the required attributes from a netCDF file. @@ -192,7 +133,7 @@ def HazardDatabase(configFile): # pylint: disable=C0103 :param str configFile: Path to configuration file """ - global _singletons + global _singletons # pylint: disable=W0603 instance = _singletons.get(configFile) if not instance: instance = _HazardDatabase(configFile) @@ -232,8 +173,6 @@ def __init__(self, configFile): detect_types=PARSE_DECLTYPES|PARSE_COLNAMES) self.exists = True - - import atexit atexit.register(self.close) @disableOnWorkers @@ -250,7 +189,6 @@ def createDatabase(self): self.createTable('tblTracks', TBLTRACKSDEF) self.exists = True self.commit() - return @disableOnWorkers def createTable(self, tblName, tblDef): @@ -317,7 +255,7 @@ def getLocations(self): locations = cur.fetchall() locations = fromrecords(locations, - names=("locId,locName,locLon,locLat")) + names=("locId,locName,locLon,locLat")) return locations def generateEventTable(self): @@ -489,25 +427,6 @@ def processEvents(self): self.insertEvents(eventparams) self.insertWindSpeeds(wsparams) - - def loadWindfieldFile(self, ncobj): - """ - Load an individual dataset. - - :param str filename: filename to load. - - :returns: tuple containing longitude, latitude, wind speed, - eastward and northward components and pressure grids. 
- """ - lon = ncobj.variables['lon'][:] - lat = ncobj.variables['lat'][:] - vmax = ncobj.variables['vmax'][:] - ua = ncobj.variables['ua'][:] - va = ncobj.variables['va'][:] - pmin = ncobj.variables['slp'][:] - - return (lon, lat, vmax, ua, va, pmin) - def processEvent(self, filename, locations, eventNum): """ Process an individual event file @@ -522,9 +441,13 @@ def processEvent(self, filename, locations, eventNum): log.debug("Event ID: {0}".format(eventId)) try: ncobj = Dataset(pjoin(self.windfieldPath, filename)) - except: - log.warn("Cannot open {0}".\ + except IOError as excmsg: + log.warning("Cannot open {0}".\ format(pjoin(self.windfieldPath, filename))) + log.warning(excmsg) + except: + log.exception(("Failed trying to open " + f"{pjoin(self.windfieldPath, filename)}")) # First perform the event update for tblEvents: fname = pjoin(self.windfieldPath, filename) @@ -540,7 +463,7 @@ def processEvent(self, filename, locations, eventNum): "", datetime.now()) # Perform update for tblWindSpeed: - lon, lat, vmax, ua, va, pmin = self.loadWindfieldFile(ncobj) + lon, lat, vmax, ua, va, pmin = loadWindfieldFile(ncobj) ncobj.close() wsparams = list() @@ -662,8 +585,8 @@ def processTracks(self): try: result = comm.recv(source=MPI.ANY_SOURCE, tag=MPI.ANY_TAG, status=status) - except: - log.warn("Problems recieving results on node 0") + except Exception: + log.warning("Problems recieving results on node 0") d = status.source if result: @@ -720,6 +643,24 @@ def insertTracks(self, trackRecords): log.debug("Inserted {0} records into tblTracks".format(len(trackRecords))) +def loadWindfieldFile(ncobj): + """ + Load an individual dataset. + + :param str filename: filename to load. + + :returns: tuple containing longitude, latitude, wind speed, + eastward and northward components and pressure grids. + """ + lon = ncobj.variables['lon'][:] + lat = ncobj.variables['lat'][:] + vmax = ncobj.variables['vmax'][:] + ua = ncobj.variables['ua'][:] + va = ncobj.variables['va'][:] + pmin = ncobj.variables['slp'][:] + + return (lon, lat, vmax, ua, va, pmin) + def processTrack(trackfile, locations): """ Process individual track to determine distance to locations, etc. @@ -767,7 +708,7 @@ def run(configFile): location_file = config.get('Input', 'LocationFile') buildLocationDatabase(location_db, location_file) - global MPI, comm + global MPI, comm # pylint: disable=W0601 MPI = attemptParallel() comm = MPI.COMM_WORLD db = HazardDatabase(configFile) @@ -875,219 +816,3 @@ def buildLocationDatabase(location_db, location_file, location_type='AWS'): locdb.executemany(INSLOCATIONS, locations) locdb.commit() locdb.close() - -@timer -def locationRecordsExceeding(hazard_db, locId, windSpeed): - """ - Select all records where the wind speed at the given location is - greater than some threshold. - - :param hazard_db: :class:`HazardDatabase` instance. - :param int locId: Location identifier. - :param float windSpeed: Select all records where the wind speed - at the given location is greater than - this value. - - :returns: :class:`numpy.recarray` containing the name, longitude - & latitude of the location, the wind speed of the - record, the event Id and the event file that holds the - event that generated the wind speed. - - Example:: - - >>> db = HazardDatabase(configFile) - >>> locId = 00001 - >>> records = locationRecordsExceeding(db, locId, 47.) - - """ - - query = ("SELECT l.locId, l.locName, w.wspd, w.eventId " - "FROM tblLocations l " - "INNER JOIN tblWindSpeed w ON l.locId = w.locId " - "WHERE w.wspd > ? and l.locId = ? 
" - "ORDER BY w.wspd ASC") - - cur = hazard_db.execute(query, (windSpeed, locId,)) - results = cur.fetchall() - results = fromrecords(results, names=('locId,locName,wspd,eventId')) - - return results - -@timer -def locationRecords(hazard_db, locId): - """ - Select all wind speed records for a given location. - - :param hazard_db: :class:`HazardDatabase` instance. - :param int locId: Location identifier. - - :returns: :class:`numpy.recarray` containing the location id, location - name, wind speed and event id. - - """ - - query = ("SELECT w.locId, l.locName, w.wspd, w.umax, w.vmax, w.eventId " - "FROM tblWindSpeed w " - "INNER JOIN tblLocations l " - "ON w.locId = l.locId " - "WHERE l.locId = ? ORDER BY w.wspd ASC") - cur = hazard_db.execute(query, (locId,)) - results = cur.fetchall() - results = fromrecords(results, - names=('locId,locName,wspd,umax,vmax,eventId')) - - return results - -@timer -def locationPassage(hazard_db, locId, distance=50): - """ - Select all records from tblTracks that pass within a defined - distance of the given location - - :param hazard_db: :class:`HazardDatabase` instance. - :param int locId: Location identifier. - :param distance: Distance threshold (in kilometres). - - :returns: :class:`numpy.recarray` containing the location id, location - name, event id, closest distance of approach, wind speed and - event file for all events that pass within the defined - distance of the selected location. - - Example:: - - >>> db = HazardDatabase(configFile) - >>> locId = 000001 - >>> records = locationPassage(db, locId, 50) - - """ - - query = ("SELECT l.locId, l.locName, t.eventId, t.distClosest, " - "w.wspd, e.eventFile FROM tblLocations l " - "INNER JOIN tblTracks t " - "ON l.locId = t.locId " - "JOIN tblWindSpeed w on w.eventId = t.eventId " - "JOIN tblEvents e on e.eventId = t.eventId " - "WHERE t.distClosest < ? and l.locId = ?") - cur = hazard_db.execute(query, (distance, locId)) - results = cur.fetchall() - results = fromrecords(results, - names=('locId,locName,eventId,' - 'distClosest,wspd,eventFile')) - return results - -@timer -def locationPassageWindSpeed(hazard_db, locId, speed, distance): - """ - Select records from _tblWindSpeed_, _tblTracks_ and _tblEvents_ that - generate a defined wind speed and pass within a given distance - of the location. - - :param hazard_db: :class:`HazardDatabase` instance. - :param int locId: Location identifier. - :param float speed: Minimum wind speed (m/s). - :param float distance: Distance threshold (kilometres). - - """ - - query = ("SELECT l.locName, w.wspd, w.umax, w.vmax, w.eventId, " - "t.distClosest, e.eventMaxWind, e.eventMinPressure " - "FROM tblLocations l " - "JOIN tblWindSpeed w on l.locId = w.locId " - "JOIN tblEvents e ON e.eventId = w.eventId " - "JOIN tblTracks t ON w.locId = t.locId AND w.eventId = t.eventId " - "WHERE l.locId = ? and w.wspd > ? AND t.distClosest <= ? " - "ORDER BY w.wspd ASC") - - cur = hazard_db.execute(query, (locId, speed, distance)) - results = cur.fetchall() - results = fromrecords(results, - names=('locName,wspd,umax,vmax,eventId,' - 'distClosest,maxwind,pmin')) - - return results - -@timer -def locationReturnPeriodEvents(hazard_db, locId, return_period): - """ - Select all records from tblEvents where the wind speed is - greater than the return period wind speed for the given return period. - - :param hazard_db: :class:`HazardDatabase` instance. - :param int locId: Location identifier. - :param int return_period: Nominated return period. 
- - :returns: :class:`numpy.recarray` of location id and wind speeds of - all events that are greater than the return level of the - nominated return period. - - The following example would return the wind speeds of all events that - exceed the 500-year return period wind speed for the selected location. - - Example:: - - >>> db = HazardDatabase(configFile) - >>> locId = 000001 - >>> records = locationReturnPeriodEvents(db, locId, 500) - - """ - - query = ("SELECT l.locId, h.wspd FROM tblLocations l " - "INNER JOIN tblHazard h ON l.locId = h.locId " - "WHERE h.returnPeriod = ? and l.locId = ?") - cur = hazard_db.execute(query, (return_period, locId)) - row = cur.fetchall() - return_level = row[0][1] - results = locationRecordsExceeding(hazard_db, locId, return_level) - - return results - -@timer -def locationAllReturnLevels(hazard_db, locId): - """ - Select all return level wind speeds (including upper and lower - confidence intervals) for a selected location. - - :param hazard_db: :class:`HazardDatabase` instance. - :param int locId: Location identifier. - - :returns: :class:`numpy.recarray` containing the location id, location - name, return period, return period windspeed and lower/upper - estimates of the return period wind speed. - - """ - - query = ("SELECT l.locId, l.locName, h.returnPeriod, h.wspd, " - "h.wspdLower, h.wspdUpper " - "FROM tblLocations l INNER JOIN tblHazard h " - "ON l.locId = h.locId " - "WHERE l.locId = ? " - "ORDER BY h.returnPeriod") - - cur = hazard_db.execute(query, (locId,)) - results = cur.fetchall() - results = fromrecords(results, - names=('locId,locName,returnPeriod,' - 'wspd,wspdLower,wspdUpper')) - - return results - -@timer -def selectEvents(hazard_db): - """ - Select all events from _tblEvents_. - - :param hazard_db: :class:`HazardDatabase` instance. - - :returns: :class:`numpy.recarray` containing the full listing of each - event in the table. - - """ - - query = "SELECT * FROM tblEvents ORDER BY eventMaxWind ASC" - cur = hazard_db.execute(query) - results = cur.fetchall() - names = ("eventNum,eventId,eventFile,eventTrackFile,eventMaxWind," - "eventMinPressure,dtTrackFile,dtWindfieldFile,tcrmVer," - "Comments,dtCreated") - results = fromrecords(results, names=names) - return results diff --git a/database/definitions.py b/database/definitions.py new file mode 100644 index 00000000..eb0321df --- /dev/null +++ b/database/definitions.py @@ -0,0 +1,74 @@ +""" +:mod:`definitions` -- table and statement definitions +===================================================== + +.. module:: definitions + :synopsis: Table definitions, insert statements and + query statements for the database module. +.. 
moduleauthor:: Craig Arthur + +""" + +# Table definition statements +# Stations - we assume a geographic coordinate system: +TBLLOCATIONDEF = ("CREATE TABLE IF NOT EXISTS tblLocations " + "(locId integer PRIMARY KEY, locCode text, " + "locName text, locType text, locLon real, " + "locLat real, locElev real, locCountry text, " + "locSource text, Comments text, " + "dtCreated timestamp)") + +# Events: +TBLEVENTSDEF = ("CREATE TABLE IF NOT EXISTS tblEvents " + "(eventNumber integer PRIMARY KEY, eventId text, " + "eventFile text, eventTrackFile text, " + "eventMaxWind real, eventMinPressure real, " + "dtTrackFile timestamp, dtWindfieldFile timestamp, " + "tcrmVersion text, Comments text, dtCreated timestamp)") + +#Station wind speed from events: +TBLWINDSPEEDDEF = ("CREATE TABLE IF NOT EXISTS tblWindSpeed " + "(locId integer, eventId text, wspd real, umax real, " + "vmax real, pmin real, Comments text, " + "dtCreated timestamp)") + +# Station hazard levels: +TBLHAZARDDEF = ("CREATE TABLE IF NOT EXISTS tblHazard " + "(locId integer, returnPeriod real, wspd real, " + " wspdUpper real, wspdLower real, loc real, " + "scale real, shape real, tcrmVersion text, " + "dtHazardFile timestamp, Comments text, " + "dtCreated timestamp)") + +# Proximity of tracks to stations: +TBLTRACKSDEF = ("CREATE TABLE IF NOT EXISTS tblTracks " + "(locId integer, eventId text, distClosest real, " + "prsClosest real, dtClosest timestamp, Comments text, " + "dtCreated timestamp)") + +# Insert statements: +# Insert locations: +INSLOCATIONS = ("INSERT OR REPLACE INTO tblLocations " + "VALUES (?,?,?,?,?,?,?,?,?,?,?)") + +# Insert event record: +INSEVENTS = "INSERT INTO tblEvents VALUES (?,?,?,?,?,?,?,?,?,?,?)" + +# Insert wind speed record: +INSWINDSPEED = ("INSERT INTO tblWindSpeed " + "VALUES (?,?,?,?,?,?,?,?)") + +# Insert hazard record: +INSHAZARD = "INSERT INTO tblHazard VALUES (?,?,?,?,?,?,?,?,?,?,?,?)" + +# Insert track record: +INSTRACK = "INSERT INTO tblTracks VALUES (?,?,?,?,?,?,?)" + +# Select statements; +# Select locations within domain: +SELECTLOCATIONS = ("SELECT * FROM tblLocations WHERE " + "locLon >= ? and locLon <= ? and " + "locLat >= ? and locLat <= ?") + +# Select locId, locLon & locLat from the subset of locations: +SELECTLOCLONLAT = "SELECT locId, locLon, locLat FROM tblLocations " diff --git a/database/queries.py b/database/queries.py new file mode 100644 index 00000000..6e93edba --- /dev/null +++ b/database/queries.py @@ -0,0 +1,250 @@ +import time +import logging as log +from functools import wraps, reduce + +import numpy as np + +def fromrecords(records, names): + """ Convert records to array, even if no data """ + # May become redundant after https://github.com/numpy/numpy/issues/1862 + if records: + rval = np.rec.fromrecords(records, names=names) + else: + rval = np.array([], [(name, 'O') for name in names.split(',')]) + + return rval + +def timer(func): + """ + A simple timing decorator for the entire process. + + """ + + @wraps(func) + def wrap(*args, **kwargs): + t1 = time.time() + res = func(*args, **kwargs) + tottime = time.time() - t1 + msg = "%02d:%02d:%02d " % \ + reduce(lambda ll, b: divmod(ll[0], b) + ll[1:], + [(tottime,), 60, 60]) + log.debug("Time for {0}: {1}".format(func.__name__, msg)) + return res + + return wrap + +@timer +def locationRecordsExceeding(hazard_db, locId, windSpeed): + """ + Select all records where the wind speed at the given location is + greater than some threshold. + + :param hazard_db: :class:`HazardDatabase` instance. + :param int locId: Location identifier. 
+ :param float windSpeed: Select all records where the wind speed + at the given location is greater than + this value. + + :returns: :class:`numpy.recarray` containing the name, longitude + & latitude of the location, the wind speed of the + record, the event Id and the event file that holds the + event that generated the wind speed. + + Example:: + + >>> db = HazardDatabase(configFile) + >>> locId = 00001 + >>> records = locationRecordsExceeding(db, locId, 47.) + + """ + + query = ("SELECT l.locId, l.locName, w.wspd, w.eventId " + "FROM tblLocations l " + "INNER JOIN tblWindSpeed w ON l.locId = w.locId " + "WHERE w.wspd > ? and l.locId = ? " + "ORDER BY w.wspd ASC") + + cur = hazard_db.execute(query, (windSpeed, locId,)) + results = cur.fetchall() + results = fromrecords(results, names=('locId,locName,wspd,eventId')) + + return results + +@timer +def locationRecords(hazard_db, locId): + """ + Select all wind speed records for a given location. + + :param hazard_db: :class:`HazardDatabase` instance. + :param int locId: Location identifier. + + :returns: :class:`numpy.recarray` containing the location id, location + name, wind speed and event id. + + """ + + query = ("SELECT w.locId, l.locName, w.wspd, w.umax, w.vmax, w.eventId " + "FROM tblWindSpeed w " + "INNER JOIN tblLocations l " + "ON w.locId = l.locId " + "WHERE l.locId = ? ORDER BY w.wspd ASC") + cur = hazard_db.execute(query, (locId,)) + results = cur.fetchall() + results = fromrecords(results, + names=('locId,locName,wspd,umax,vmax,eventId')) + + return results + +@timer +def locationPassage(hazard_db, locId, distance=50): + """ + Select all records from tblTracks that pass within a defined + distance of the given location + + :param hazard_db: :class:`HazardDatabase` instance. + :param int locId: Location identifier. + :param distance: Distance threshold (in kilometres). + + :returns: :class:`numpy.recarray` containing the location id, location + name, event id, closest distance of approach, wind speed and + event file for all events that pass within the defined + distance of the selected location. + + Example:: + + >>> db = HazardDatabase(configFile) + >>> locId = 000001 + >>> records = locationPassage(db, locId, 50) + + """ + + query = ("SELECT l.locId, l.locName, t.eventId, t.distClosest, " + "w.wspd, e.eventFile FROM tblLocations l " + "INNER JOIN tblTracks t " + "ON l.locId = t.locId " + "JOIN tblWindSpeed w on w.eventId = t.eventId " + "JOIN tblEvents e on e.eventId = t.eventId " + "WHERE t.distClosest < ? and l.locId = ?") + cur = hazard_db.execute(query, (distance, locId)) + results = cur.fetchall() + results = fromrecords(results, + names=('locId,locName,eventId,' + 'distClosest,wspd,eventFile')) + return results + +@timer +def locationPassageWindSpeed(hazard_db, locId, speed, distance): + """ + Select records from _tblWindSpeed_, _tblTracks_ and _tblEvents_ that + generate a defined wind speed and pass within a given distance + of the location. + + :param hazard_db: :class:`HazardDatabase` instance. + :param int locId: Location identifier. + :param float speed: Minimum wind speed (m/s). + :param float distance: Distance threshold (kilometres). + + """ + + query = ("SELECT l.locName, w.wspd, w.umax, w.vmax, w.eventId, " + "t.distClosest, e.eventMaxWind, e.eventMinPressure " + "FROM tblLocations l " + "JOIN tblWindSpeed w on l.locId = w.locId " + "JOIN tblEvents e ON e.eventId = w.eventId " + "JOIN tblTracks t ON w.locId = t.locId AND w.eventId = t.eventId " + "WHERE l.locId = ? and w.wspd > ? AND t.distClosest <= ? 
" + "ORDER BY w.wspd ASC") + + cur = hazard_db.execute(query, (locId, speed, distance)) + results = cur.fetchall() + results = fromrecords(results, + names=('locName,wspd,umax,vmax,eventId,' + 'distClosest,maxwind,pmin')) + + return results + +@timer +def locationReturnPeriodEvents(hazard_db, locId, return_period): + """ + Select all records from tblEvents where the wind speed is + greater than the return period wind speed for the given return period. + + :param hazard_db: :class:`HazardDatabase` instance. + :param int locId: Location identifier. + :param int return_period: Nominated return period. + + :returns: :class:`numpy.recarray` of location id and wind speeds of + all events that are greater than the return level of the + nominated return period. + + The following example would return the wind speeds of all events that + exceed the 500-year return period wind speed for the selected location. + + Example:: + + >>> db = HazardDatabase(configFile) + >>> locId = 000001 + >>> records = locationReturnPeriodEvents(db, locId, 500) + + """ + + query = ("SELECT l.locId, h.wspd FROM tblLocations l " + "INNER JOIN tblHazard h ON l.locId = h.locId " + "WHERE h.returnPeriod = ? and l.locId = ?") + cur = hazard_db.execute(query, (return_period, locId)) + row = cur.fetchall() + return_level = row[0][1] + results = locationRecordsExceeding(hazard_db, locId, return_level) + + return results + +@timer +def locationAllReturnLevels(hazard_db, locId): + """ + Select all return level wind speeds (including upper and lower + confidence intervals) for a selected location. + + :param hazard_db: :class:`HazardDatabase` instance. + :param int locId: Location identifier. + + :returns: :class:`numpy.recarray` containing the location id, location + name, return period, return period windspeed and lower/upper + estimates of the return period wind speed. + + """ + + query = ("SELECT l.locId, l.locName, h.returnPeriod, h.wspd, " + "h.wspdLower, h.wspdUpper " + "FROM tblLocations l INNER JOIN tblHazard h " + "ON l.locId = h.locId " + "WHERE l.locId = ? " + "ORDER BY h.returnPeriod") + + cur = hazard_db.execute(query, (locId,)) + results = cur.fetchall() + results = fromrecords(results, + names=('locId,locName,returnPeriod,' + 'wspd,wspdLower,wspdUpper')) + + return results + +@timer +def selectEvents(hazard_db): + """ + Select all events from _tblEvents_. + + :param hazard_db: :class:`HazardDatabase` instance. + + :returns: :class:`numpy.recarray` containing the full listing of each + event in the table. 
+ + """ + + query = "SELECT * FROM tblEvents ORDER BY eventMaxWind ASC" + cur = hazard_db.execute(query) + results = cur.fetchall() + names = ("eventNum,eventId,eventFile,eventTrackFile,eventMaxWind," + "eventMinPressure,dtTrackFile,dtWindfieldFile,tcrmVer," + "Comments,dtCreated") + results = fromrecords(results, names=names) + return results diff --git a/input/stationlist.dbf b/input/stationlist.dbf index 49cf4b445736c5513d40523d91c0517b272de3bd..7ce421c97354c97d3a1376415015946cafc8b68d 100644 GIT binary patch delta 908 zcmZ9GSyU7R7=;hZ2N+qBQKIFN*b2i8%)n5FqPY}UX`5Q9WR_NHi>0PzlApFHqEk*~ z8<;H`nhO>!N@`jer7c$CLUnrTy&m*ngyQjZ-~N04``!EJKTA@NS68KyG&?OTZ$6SB ziIODA(srgus-#ILIYZLrOzA9V$=T9HGUOcTD(6b3oG0f?H_4Lj(nBtg3#F&@lHSrs zE|R{IE&b$TxkN6N%j9yoLUJTm0+J^|2}!;bNLV6LC`A&LV!2YTlKyhFl*j-XD5Ww; z2Fo=vM25;R87|k#by6nfGD5DG8{|eADWl{jxmj+JTje$xEn{S?+%9*>oia|w%LJ*A zyJVu=EtBLPxmPC36uD3Cmj`63Oq1#IpgbfG%Of&F9+k&rrp%Jr^0>^Ax$=ZODf8qh znJ<;{v@8&1p)8VTE^o-2 zQX{ppO5T#y^0vGq@5+0!M%K#vQYY(Vy?h`WWTR}7%~CI0ZZX^(C>kOU9v_m4nKqwq3I*IwuYFB>ue*r62ERO&H delta 699 zcmWN^Q+pNw06^iV*6FLhu$Ij&E!$e&vTYj+ORG+pTDEO2+qSvt={Iy)y?5{5)Re&Q zKgj_?VuQiRcqD^lln}`z|C?C?l0~vgHVI00$ssuM z8cAbmB2A^4G?y0AQlg}lw3ar~R@zB>=^)V(BORrabe1mCRk}%c=^;I(m-LoC(pUOP ze;FVHWsnS(Au?2k$#5AVBW09~mN7C`#>se@AQNShOqMA!Ri?>wnIW+St80(StiS6g(OI#BuTQQ$Vy3-RkB*v$XZz^>t%y%lufc(w#Zi5 zCfj9)?37)yTlUCa*(dwufE<)Va#)VYQ8^~Z<%FD+Q*v6)$XQ8~b8=oT$VIs%m*t9F zm1}ZcZpcl!CAZ~{+?9KBUmnOqc_feJi9D5O@?2iXOL--)<&C_Rck*67$Vd4kpXG~u Rm2dK0(&dNzOygI2+&{@}=Gp)N diff --git a/input/stationlist.shp b/input/stationlist.shp index e99b17da35dfece8a9544063d57638d1e31a7a87..314bb5ecd3b746aad8c8ce69a214338f9cd01e38 100644 GIT binary patch delta 114 zcmdmTOT6Q@_ylRjBaJexGK{SYP?Q0F={%$rW@~9$6Ki@ delta 17 ZcmccfnC-}8wh7XVj~ZpR$}q0j2mnk$2gv{c diff --git a/postinstall.sh b/postinstall.sh deleted file mode 100644 index 3bd82652..00000000 --- a/postinstall.sh +++ /dev/null @@ -1,46 +0,0 @@ -# begin installing miniconda -if [[ "$TRAVIS_OS_NAME" != "windows" ]]; then - echo "installing miniconda for posix"; - bash $HOME/download/miniconda.sh -b -u -p $MINICONDA_PATH; -elif [[ "$TRAVIS_OS_NAME" == "windows" ]]; then - echo "folder $MINICONDA_SUB_PATH does not exist" - echo "installing miniconda for windows"; - choco install miniconda3 --params="'/JustMe /AddToPath:1 /D:$MINICONDA_PATH_WIN'"; -fi; -# end installing miniconda - -export PATH="$MINICONDA_PATH:$MINICONDA_SUB_PATH:$MINICONDA_LIB_BIN_PATH:$PATH"; - -# begin checking miniconda existance -echo "checking if folder $MINICONDA_SUB_PATH exists" -if [[ -d $MINICONDA_SUB_PATH ]]; then - echo "folder $MINICONDA_SUB_PATH exists" -else - echo "folder $MINICONDA_SUB_PATH does not exist" -fi; -# end checking miniconda existance - -source $MINICONDA_PATH/etc/profile.d/conda.sh; -hash -r; -echo $TRAVIS_OS_NAME -echo $PYTHON_VERSION -python --version - -if [[ "$TRAVIS_OS_NAME" == "windows" ]]; then - echo "Removing mpi4py from environment for windows build" - echo "Package not available in conda channels" - sed -i '/mpi4py/d' ./tcrmenv.yml -fi - -conda config --set always_yes yes --set changeps1 no; -conda update -q conda; -conda config --add channels conda-forge; -conda config --set channel_priority strict; -# Useful for debugging any issues with conda -conda info -a - -echo "Create TCRM environment" -conda env create -q -f tcrmenv.yml python=$PYTHON_VERSION; -conda activate tcrm -python --version -conda list diff --git a/preinstall.sh b/preinstall.sh deleted file mode 100644 index 339b2536..00000000 --- a/preinstall.sh +++ /dev/null @@ -1,24 +0,0 @@ -if 
[[ "$TRAVIS_OS_NAME" != "windows" ]]; then - export MINICONDA_PATH=$HOME/miniconda; - export MINICONDA_SUB_PATH=$MINICONDA_PATH/bin; -elif [[ "$TRAVIS_OS_NAME" == "windows" ]]; then - export MINICONDA_PATH=$HOME/miniconda; - export MINICONDA_PATH_WIN=`cygpath --windows $MINICONDA_PATH`; - export MINICONDA_SUB_PATH=$MINICONDA_PATH/Scripts; -fi; -export MINICONDA_LIB_BIN_PATH=$MINICONDA_PATH/Library/bin; - # Obtain miniconda installer -if [[ "$TRAVIS_OS_NAME" != "windows" ]]; then - mkdir -p $HOME/download; - if [[ "$TRAVIS_OS_NAME" == "linux" ]]; then - echo "downloading miniconda.sh for linux"; - wget https://repo.continuum.io/miniconda/Miniconda3-latest-Linux-x86_64.sh -O $HOME/download/miniconda.sh; - elif [[ "$TRAVIS_OS_NAME" == "osx" ]]; then - echo "downloading miniconda.sh for osx"; - wget https://repo.continuum.io/miniconda/Miniconda3-latest-MacOSX-x86_64.sh -O $HOME/download/miniconda.sh; - fi; -fi; - # Install openssl for Windows -if [[ "$TRAVIS_OS_NAME" == "windows" ]]; then - choco install openssl.light; -fi; \ No newline at end of file diff --git a/pylintrc b/pylintrc index abad3386..06de6f8f 100755 --- a/pylintrc +++ b/pylintrc @@ -1,5 +1,5 @@ [MESSAGES CONTROL] -disable=W0511,W0142,W1202,I0011,E1003,E1101,E0611,F0401,E1103,E1121 +disable=W0511,W0142,W1201,W1202,I0011,E1003,E1101,E0611,F0401,E1103,E1121,logging-fstring-interpolation [BASIC] attr-rgx=[a-zA-Z_][a-zA-Z0-9_]{0,30}[_]{0,1}$ diff --git a/wind/__init__.py b/wind/__init__.py index f225dac9..b6d24b33 100644 --- a/wind/__init__.py +++ b/wind/__init__.py @@ -7,9 +7,9 @@ primary vortex of the simulated TC, and bounday layer models that define the asymmetry induced by surface friction and forward motion of the TC over the earth's surface. The final output from the module is a -netCDF file containing the maximum surface gust wind speed (a 10-minute -mean wind speed, at 10 metres above ground level), along with the components -(eastward and westward) that generated the wind gust and the minimum +netCDF file containing the maximum surface gust wind speed (a 0.2-second +duration gust wind speed, at 10 metres above ground level), along with the +components (eastward and westward) that generated the wind gust and the minimum mean sea level pressure over the lifetime of the event. If multiple TCs are contained in a track file, then the output file contains the values from all events (for example, an annual maximum wind speed). @@ -759,7 +759,7 @@ def filterTracks(tracks, gridLimit, margin): log.info(f"Filtering tracks in region: {repr(gridLimit)}") validTracks = [t for t in tracks if inRegion(t, gridLimit, margin)] else: - log.info(f"No grid limit set - returning all tracks") + log.info("No grid limit set - returning all tracks") return tracks return validTracks From 7f5b03bbe108c4616eded00d710eae70b800b09f Mon Sep 17 00:00:00 2001 From: Craig Arthur Date: Tue, 9 Nov 2021 10:23:50 +1100 Subject: [PATCH 09/11] Squashed commit of the following: assertDictEqual doesn't like complex values (like arrays) commit 091db2f503f9ad1a5c45ac3306a21eae73d8ea8a --- tests/test_maps.py | 11 +++++------ 1 file changed, 5 insertions(+), 6 deletions(-) diff --git a/tests/test_maps.py b/tests/test_maps.py index 08a31518..4a1edf11 100644 --- a/tests/test_maps.py +++ b/tests/test_maps.py @@ -1,5 +1,4 @@ import unittest - import numpy as np from . 
import NumpyTestCase from matplotlib.colors import LinearSegmentedColormap @@ -48,16 +47,16 @@ def test_bigLevelValues(self): self.numpyAssertAlmostEqual(lvs, rlevs) self.assertEqual(expo, rexpo) -class TestSelectColorMap(unittest.TestCase): +class TestSelectColorMap(NumpyTestCase.NumpyTestCase): def assertColorMapEqual(self, actual, expected): """Test method for equality of LinearSegmentedColormaps""" + self.assertEqual(type(actual), type(expected)) self.assertEqual(actual.N, expected.N) - self.assertDictEqual(actual._segmentdata, - expected._segmentdata) + self.assertEqual(actual.name, expected.name) for k in list(actual._segmentdata.keys()): - self.assertListEqual(actual._segmentdata[k], - expected._segmentdata[k]) + self.numpyAssertAlmostEqual(actual._segmentdata[k], + expected._segmentdata[k]) def setUp(self): import seaborn as sns From 52581c66794a4c59e28a68cab928597286f55777 Mon Sep 17 00:00:00 2001 From: Craig Arthur Date: Mon, 15 Nov 2021 15:14:08 +1100 Subject: [PATCH 10/11] NHIRS-148: Fixed issue with FlushCache failing. (#121) --- ProcessMultipliers/processMultipliers.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/ProcessMultipliers/processMultipliers.py b/ProcessMultipliers/processMultipliers.py index 0b187c36..1c3ad5ce 100755 --- a/ProcessMultipliers/processMultipliers.py +++ b/ProcessMultipliers/processMultipliers.py @@ -199,7 +199,7 @@ def checkOutputFolders(self, working_dir, type_mapping): :param dict type_mapping: dict of shielding, terrain, topographic ''' dir_check = os.path.isdir(working_dir) - if dir_check == False: + if dir_check is False: os.makedirs(working_dir) log.info('Creating directories for outputs') else: From a9be18fe3d0cdc04fb6646703147edee20fcb094 Mon Sep 17 00:00:00 2001 From: Craig Arthur Date: Tue, 16 Nov 2021 10:17:40 +1100 Subject: [PATCH 11/11] NHIRS-148: Move FlushCache call --- ProcessMultipliers/processMultipliers.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/ProcessMultipliers/processMultipliers.py b/ProcessMultipliers/processMultipliers.py index 1c3ad5ce..2e3da4e2 100755 --- a/ProcessMultipliers/processMultipliers.py +++ b/ProcessMultipliers/processMultipliers.py @@ -1095,8 +1095,8 @@ def processMultiplierSegment(segment, source_dir_band, wind_prj, bear_prj, dst_b local[idx] = wind_data[idx] * m4[idx] with threadLock_out: dst_band.WriteArray(local, x_offset, y_offset) - if segment_id % int(math.ceil(total_segments / 100.0)) == 0: dst_band.FlushCache() + if segment_id % int(math.ceil(total_segments / 100.0)) == 0: log.info('Progress: {0:.2f} %'.format((segment_id * 100.0) / total_segments)) class run():
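
Note on the final two patches: moving the FlushCache call out of the
progress-logging conditional means the output band is flushed after every
segment write, while progress is still logged only about once per percent
of segments. A minimal sketch of the resulting control flow (the
write_segment wrapper is hypothetical and locking is omitted; dst_band is
a GDAL raster band as in the hunk above):

    import math

    def write_segment(dst_band, local, x_offset, y_offset,
                      segment_id, total_segments, log):
        # Flush on every write so cached data reaches the dataset even
        # if a later segment fails.
        dst_band.WriteArray(local, x_offset, y_offset)
        dst_band.FlushCache()
        # Log progress roughly once per percent of segments processed.
        if segment_id % int(math.ceil(total_segments / 100.0)) == 0:
            log.info('Progress: {0:.2f} %'.format(
                (segment_id * 100.0) / total_segments))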