Skip to content

Commit

Permalink
Merge commit '5a54388667a7d22f13405bf27a396f0cb5a9f5bc' into trackvalidation
Browse files Browse the repository at this point in the history
  • Loading branch information
wcarthur committed Apr 29, 2020
2 parents 1a3f34b + 5a54388 commit ff3d348
Show file tree
Hide file tree
Showing 5 changed files with 56 additions and 24 deletions.
3 changes: 3 additions & 0 deletions README.rst
Expand Up @@ -76,6 +76,9 @@ Status
.. image:: https://landscape.io/github/GeoscienceAustralia/tcrm/develop/landscape.svg?style=flat
:target: https://landscape.io/github/GeoscienceAustralia/tcrm/v2.1
:alt: Code Health

.. image:: https://zenodo.org/badge/DOI/10.5281/zenodo.3741493.svg
:target: https://doi.org/10.5281/zenodo.3741493

Screenshot
==========
Expand Down
4 changes: 4 additions & 0 deletions Utilities/config.py
Expand Up @@ -12,6 +12,8 @@

import io
from configparser import RawConfigParser
import os.path

#from ast import literal_eval as eval

def parseBool(txt):
Expand Down Expand Up @@ -273,6 +275,8 @@ def read(self, filename):
return
if self.readonce:
return
if not os.path.exists(filename):
raise ValueError("config file does not exist: {}".format(filename))
RawConfigParser.read(self, filename)
self.readonce = True

Expand Down
2 changes: 1 addition & 1 deletion Utilities/nctools.py
Expand Up @@ -130,7 +130,7 @@ def ncGetData(ncobj, var):
try:
varobj = ncobj.variables[var]
except KeyError:
logger.exception(f"{ncobj.filepath()} does not contain variable {name}")
logger.exception(f"{ncobj.filepath()} does not contain variable {var}")
raise

# Automatic conversion of masked values and unpacking
Expand Down
69 changes: 47 additions & 22 deletions Utilities/timeseries.py
Expand Up @@ -11,11 +11,11 @@
"""

import logging
import numpy as np

from os.path import join as pjoin

from configparser import NoOptionError

import numpy as np

from Utilities.config import ConfigParser
from Utilities.files import flLoadFile
from Utilities.maputils import find_index
Expand All @@ -31,14 +31,14 @@
OUTPUT_NAMES = ('Station', 'Time', 'Longitude', 'Latitude',
'Speed', 'UU', 'VV', 'Bearing',
'Pressure')
OUTPUT_TYPES = ['|U16', '|U16', 'f8', 'f8', 'f8', 'f8', 'f8', 'f8', 'f8']
OUTPUT_TYPES = ['|U16', '|U16', 'f8', 'f8', 'f8', 'f8', 'f8', 'f8', 'f8']
OUTPUT_FMT = ['%s', '%s', '%9.5f', '%9.5f',
'%6.2f', '%6.2f', '%6.2f', '%6.2f',
'%7.2f']

MINMAX_NAMES = ('Station', 'Time', 'Longitude', 'Latitude',
'Speed', 'UU', 'VV', 'Bearing', 'Pressure')
MINMAX_TYPES = ['|U16', '|U16', 'f8', 'f8', 'f8', 'f8', 'f8', 'f8', 'f8']
MINMAX_TYPES = ['|U16', '|U16', 'f8', 'f8', 'f8', 'f8', 'f8', 'f8', 'f8']
MINMAX_FMT = ['%s', '%s', '%9.5f', '%9.5f',
'%6.2f', '%6.2f', '%6.2f', '%6.2f',
'%7.2f']
Expand All @@ -49,6 +49,21 @@
"""

class Station(object):
"""Station:
Description: An object to represent a location for which time series
data will be extracted
Members:
`id`: Unique id string for the station
`lon`: Longitude of the station (geographic coordinates)
`lat`: Latitude of the station (geographic coordinates)
`data`: A `DynamicRecArray` to hold the time series data
Methods:
`insideGrid`: Determine if the station is inside the simulation domain.
"""

def __init__(self, stationid, longitude, latitude):

self.id = stationid
Expand All @@ -61,8 +76,7 @@ def __getattr__(self, key):
"""
Get the `key` from the `data` object.
:type key: str
:param key: the key to lookup in the `data` object.
:param str key: the key to lookup in the `data` object.
"""
if key.startswith('__') and key.endswith('__'):
return super(Station, self).__getattr__(key)
Expand All @@ -84,12 +98,24 @@ def insideGrid(self, gridx, gridy):
class Timeseries(object):
"""Timeseries:
Description:
Description: Extract data at a set of :class:`Station`s
Parameters:
:param str configFile: Path to a TCRM configuration file
Members:
`meta`: Boolean whether additional metadata is attached to the `Station`s
`outputPath`: Directory where extracted data will be stored in csv-format files
`minfile`: Name of the file where minima for all `Station`s will be stored.
This will be the `outputPath` folder
`maxfile`: As above, but for maxima (e.g. maximum wind speeds)
`stations`: A list of `Station` objects, read from a file containing details of the stations
Methods:
Internal methods:
"""

def __init__(self, configFile):
Expand All @@ -107,16 +133,16 @@ def __init__(self, configFile):

stnFile = config.get('Timeseries', 'LocationFile')
self.outputPath = pjoin(config.get('Output', 'Path'),
'process', 'timeseries')
'process', 'timeseries')

self.maxfile = pjoin(config.get('Output', 'Path'),
'process', 'maxima.csv')
'process', 'maxima.csv')
self.minfile = pjoin(config.get('Output', 'Path'),
'process', 'minima.csv')
'process', 'minima.csv')


log.info("Loading timeseries stations from %s"%stnFile)
log.debug("Timeseries data will be written into %s"%self.outputPath)
log.info(f"Loading timeseries stations from {stnFile}")
log.debug(f"Timeseries data will be written into {self.outputPath}")
self.stations = []
if stnFile.endswith("shp"):
try:
Expand Down Expand Up @@ -145,8 +171,8 @@ def __init__(self, configFile):
stnlat = stndata[:, 2].astype(float)
for sid, lon, lat in zip(stnid, stnlon, stnlat):
self.stations.append(Station(sid, lon, lat))
log.info("There are {0} stations that will collect timeseries data".format(len(self.stations)))
log.info(f"There are {len(self.stations)} stations that will collect timeseries data")

def sample(self, lon, lat, spd, uu, vv, prs, gridx, gridy):
"""
Extract values from 2-dimensional grids at the given lat/lon.
Expand Down Expand Up @@ -195,14 +221,14 @@ def extract(self, dt, spd, uu, vv, prs, gridx, gridy):
if stn.insideGrid(gridx, gridy):
stns += 1
result = self.sample(stn.lon, stn.lat, spd, uu, vv, prs,
gridx, gridy)
gridx, gridy)
ss, ux, vy, bb, pp = result
stn.data.append((str(stn.id), dt, stn.lon, stn.lat, ss,
ux, vy, bb, pp))
ux, vy, bb, pp))

else:
stn.data.append((str(stn.id), dt, stn.lon, stn.lat, 0.0, 0.0,
0.0, 0.0, prs[0, 0]))
0.0, 0.0, prs[0, 0]))
log.debug("Extracted data for {0} stations".format(stns))

def shutdown(self):
Expand All @@ -212,7 +238,7 @@ def shutdown(self):

header = 'Station,Time,Longitude,Latitude,Speed,UU,VV,Bearing,Pressure'
maxheader = ('Station,Time,Longitude,Latitude,Speed,'
'UU,VV,Bearing,Pressure')
'UU,VV,Bearing,Pressure')

max_data = DynamicRecArray(dtype={'names': MINMAX_NAMES,
'formats':MINMAX_TYPES})
Expand All @@ -236,9 +262,9 @@ def shutdown(self):


np.savetxt(self.maxfile, max_data.data, fmt=MINMAX_FMT, delimiter=',',
header=maxheader, comments='')
header=maxheader, comments='')
np.savetxt(self.minfile, min_data.data, fmt=MINMAX_FMT, delimiter=',',
header=maxheader, comments='')
header=maxheader, comments='')
"""
for stn in self.stations:
if type(self.maxdata[stn.id][3]) == datetime.datetime:
Expand Down Expand Up @@ -272,4 +298,3 @@ def shutdown(self):
# '%6.2f','%6.2f','%7.2f'] )
"""
log.info("Station data written to file")

2 changes: 1 addition & 1 deletion wind/windmodels.py
Expand Up @@ -332,7 +332,7 @@ def secondDerivative(self):
assert d2Vm < 0.0
except AssertionError:
log.critical(("Pressure deficit: {0:.2f} hPa,"
" RMW: {1:%2f} km".format(dP/100., rMax/1000.)))
" RMW: {1:.2f} km".format(dP/100., rMax/1000.)))
raise

return d2Vm
Expand Down

0 comments on commit ff3d348

Please sign in to comment.