Merge commit 'cf4fe2bfc3851f340edf3d6788fa0bced17cba1b' into develop
wcarthur committed Jun 17, 2020
2 parents 380a7ac + cf4fe2b commit cd921d0
Showing 9 changed files with 189 additions and 39 deletions.
109 changes: 95 additions & 14 deletions ProcessMultipliers/processMultipliers.py
@@ -99,8 +99,17 @@ def __init__(self, configFile):
        config = ConfigParser()
        config.read(configFile)

        # Check for computed wind multiplier file path in config file
        if config.has_option('Input', 'Multipliers'):
            self.computed_wm_path = config.get('Input', 'Multipliers')
            log.info('Using computed wind multiplier files from {0}'.format(self.computed_wm_path))
            if config.has_option('Input', 'Extent'):
                self.extent = config.geteval('Input', 'Extent')
                log.info('Provided extent {0}'.format(self.extent))
            else:
                self.extent = dict()
        # Check for wind multiplier file path in config file
-        if config.has_option('Input', 'RawMultipliers'):
+        elif config.has_option('Input', 'RawMultipliers'):
            self.WMPath = config.get('Input', 'RawMultipliers')
            log.info('Using multiplier files from {0}'.format(self.WMPath))
        else:
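
For reference, the options the constructor now looks for live in the [Input] section of the TCRM configuration file. A minimal sketch (paths and extent values are hypothetical; Extent is read with config.geteval, so it is written as a Python dict literal):

    [Input]
    ; Pre-computed 8-band wind multiplier raster; triggers the extract path
    Multipliers = /data/wind_multipliers/m4_combined.tif
    ; Optional processing extent; an effective extent is computed if omitted
    Extent = {'xMin': 148.0, 'xMax': 152.0, 'yMin': -24.0, 'yMax': -20.0}
    ; Raw multiplier tiles; used only when Multipliers is not set
    RawMultipliers = /data/wind_multipliers/raw/
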
@@ -163,7 +172,7 @@ def copyTranslateMultipliers(self, tiles, configFile, type_mapping, working_dir)
'NETCDF:{0}{1}/{2}:{3} {4}{5}.tif'
.format(working_dir, wm, output, var, working_dir,
output_name[:-3])) # -3 to drop '.nc'

log.info('%s translated to Geotif', output_name[:-3])

def mergeWindMultipliers(self, type_mapping, dirns, working_dir):
@@ -210,7 +219,7 @@ def combineDirections(self, dirns, working_dir)
m_data1 = ma.masked_values(data1, -9999)
m_data2 = ma.masked_values(data2, -9999)
m_data3 = ma.masked_values(data3, -9999)

log.debug('Size of data arrays: terrain = {0}, topographic = {1}, shielding = {2}'
.format(m_data1.shape, m_data2.shape, m_data3.shape))

@@ -225,6 +234,73 @@ def combineDirections(self, dirns, working_dir)
bandOut.SetNoDataValue(-9999)
BandWriteArray(bandOut, dataOut.data)

    def extractDirections(self, dirns, output_path):
        """
        Create a single GeoTIFF for each wind direction from the 8-band
        source file (terrain, topographic and shielding multipliers
        combined), applying the specified extent. Output files are named
        "m4_" + direction.

        :param list dirns: list of eight ordinal directions for wind
        :param str output_path: path to the output directory
        """
        band_index = 1
        log.info('Multipliers will be written to {0}'.format(output_path))
        for dirn in dirns:
            log.info('working on %s', dirn)
            os.system('gdal_translate -a_srs EPSG:4326 -of GTiff -projwin '
                      '{0} {1} {2} {3} -b {4} "{5}" {6}m4_{7}.tif'
                      .format(self.extent['xMin'], self.extent['yMax'],
                              self.extent['xMax'], self.extent['yMin'],
                              band_index, self.computed_wm_path,
                              output_path, dirn))
            band_index += 1


def computeOutputExtentIfInvalid(extent, gust_file, computed_wm_path):
    """
    Check validity of the extent. If it is not valid, the output image
    extent is computed from the regional wind data (gust file) and the
    wind multiplier image extents.

    :param dict extent: extent provided in config file
    :param str gust_file: file path of regional wind data / gust file
    :param str computed_wm_path: file path of wind multiplier image
    :return: the effective extent as a dict
    """
    if 'xMin' in extent and 'xMax' in extent and 'yMin' in extent and 'yMax' in extent:
        return extent

    log.info('Invalid extent. Calculating default extent.')
    ncobj = Dataset(gust_file, 'r')
    lat = ncobj.variables['lat'][:]
    lon = ncobj.variables['lon'][:]
    xMinGust = min(lon)
    xMaxGust = max(lon)
    yMinGust = min(lat)
    yMaxGust = max(lat)
    log.info('Extent from regional wind data '
             '{{\'xMin\': {0}, \'xMax\': {1}, \'yMin\': {2}, \'yMax\': {3} }}'
             .format(xMinGust, xMaxGust, yMinGust, yMaxGust))
    del ncobj

    # Calculate extent of wind multiplier image
    ds = gdal.Open(computed_wm_path, gdal.GA_ReadOnly)
    widthWM = ds.RasterXSize
    heightWM = ds.RasterYSize
    gt = ds.GetGeoTransform()
    xMinWM = gt[0]
    yMinWM = gt[3] + widthWM * gt[4] + heightWM * gt[5]
    xMaxWM = gt[0] + widthWM * gt[1] + heightWM * gt[2]
    yMaxWM = gt[3]
    log.info('Extent from wind multiplier image '
             '{{\'xMin\': {0}, \'xMax\': {1}, \'yMin\': {2}, \'yMax\': {3} }}'
             .format(xMinWM, xMaxWM, yMinWM, yMaxWM))
    del ds

    # Take the intersection of the gust-file extent and the wind
    # multiplier image extent
    extent = dict(xMin=max(xMinGust, xMinWM),
                  xMax=min(xMaxGust, xMaxWM),
                  yMin=max(yMinGust, yMinWM),
                  yMax=min(yMaxGust, yMaxWM))
    log.info('Applying effective extent {0}'.format(extent))
    return extent

def generate_syn_mult_img(tl_x, tl_y, delta, dir_path, shape,
indices=syn_indices,
every_fill=None):
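
As a cross-check of the geotransform arithmetic in computeOutputExtentIfInvalid above, a minimal standalone sketch (the geotransform values are hypothetical; gt[2] and gt[4] are zero for a north-up raster, so those terms drop out):

    # Hypothetical north-up raster: origin (148.0, -20.0), 0.01-degree
    # pixels, 400 columns by 400 rows.
    gt = (148.0, 0.01, 0.0, -20.0, 0.0, -0.01)
    width, height = 400, 400

    xMin = gt[0]                                   # 148.0
    xMax = gt[0] + width * gt[1] + height * gt[2]  # 152.0
    yMax = gt[3]                                   # -20.0
    yMin = gt[3] + width * gt[4] + height * gt[5]  # -24.0

    print(dict(xMin=xMin, xMax=xMax, yMin=yMin, yMax=yMax))
    # {'xMin': 148.0, 'xMax': 152.0, 'yMin': -24.0, 'yMax': -20.0}

The intersection with the gust-file extent then simply takes the larger of the two minima and the smaller of the two maxima on each axis.
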
@@ -392,7 +468,7 @@ def calculateBearing(uu, vv):

@timer
def reprojectDataset(src_file, match_filename, dst_filename,
                     resampling_method = gdalconst.GRA_Bilinear,
                     match_projection = None):
"""
Reproject a source dataset to match the projection of another
@@ -569,7 +645,7 @@ def processMult(wspd, uu, vv, lon, lat, working_dir, m4_max_file = 'm4_ne.tif'):
return output_file

class run():

def __init__(self):

"""
@@ -654,15 +730,20 @@ def main(self):
"""
        log.debug('Instantiating getMultipliers class')
        gM = getMultipliers(self.configFile)
-        log.debug('Running checkOutputFolders')
-        gM.checkOutputFolders(self.working_dir, self.type_mapping)
-        log.debug('Running Translate Multipliers')
-        gM.copyTranslateMultipliers(self.tiles, self.configFile,
-                                    self.type_mapping, self.working_dir)
-        log.debug('Running mergeWindMultipliers')
-        gM.mergeWindMultipliers(self.type_mapping, self.dirns, self.working_dir)
-        log.debug('Running combineDirections')
-        gM.combineDirections(self.dirns, self.working_dir)
+        if gM.computed_wm_path is None:
+            log.debug('Running checkOutputFolders')
+            gM.checkOutputFolders(self.working_dir, self.type_mapping)
+            log.debug('Running Translate Multipliers')
+            gM.copyTranslateMultipliers(self.tiles, self.configFile,
+                                        self.type_mapping, self.working_dir)
+            log.debug('Running mergeWindMultipliers')
+            gM.mergeWindMultipliers(self.type_mapping, self.dirns, self.working_dir)
+            log.debug('Running combineDirections')
+            gM.combineDirections(self.dirns, self.working_dir)
+        else:
+            gM.extent = computeOutputExtentIfInvalid(gM.extent, self.gust_file, gM.computed_wm_path)
+            log.debug('Running extractDirections')
+            gM.extractDirections(self.dirns, self.working_dir)

        # Load the wind data:
        log.info("Loading regional wind data from {0}".format(self.gust_file))
3 changes: 3 additions & 0 deletions README.rst
@@ -76,6 +76,9 @@ Status
.. image:: https://landscape.io/github/GeoscienceAustralia/tcrm/develop/landscape.svg?style=flat
:target: https://landscape.io/github/GeoscienceAustralia/tcrm/v2.1
:alt: Code Health

.. image:: https://zenodo.org/badge/DOI/10.5281/zenodo.3741493.svg
:target: https://doi.org/10.5281/zenodo.3741493

Screenshot
==========
7 changes: 7 additions & 0 deletions StatInterface/GenerateDistributions.py
@@ -154,6 +154,13 @@ def allDistributions(self, lonLat, parameterList, parameterName=None,

        self.pName = parameterName

        if len(self.pList) != len(self.lonLat):
            errmsg = ("Parameter data and "
                      "Lon/Lat data are not the same length "
                      "for {}.".format(parameterName))
            self.logger.critical(errmsg)
            raise IndexError(errmsg)

        maxCellNum = stats.maxCellNum(self.gridLimit, self.gridSpace)

        # Writing CDF dataset for all individual cell number into files
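
The effect of the new guard, as a tiny standalone sketch (array shapes are hypothetical; in allDistributions the equivalents are self.pList and self.lonLat):

    import numpy as np

    pList = np.zeros(100)       # parameter values (e.g. pressure rate)
    lonLat = np.zeros((99, 2))  # cyclone origin positions: one record short

    # Mirrors the check added above: fail fast with a clear message rather
    # than a confusing indexing or broadcasting error further downstream.
    if len(pList) != len(lonLat):
        raise IndexError("Parameter data and Lon/Lat data are not the "
                         "same length for pressure_rate.")
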
2 changes: 1 addition & 1 deletion StatInterface/StatInterface.py
@@ -113,7 +113,7 @@ def kdeGenesisDate(self):
        log.debug('Reading data from %s',
                  pjoin(self.processPath, 'jdays'))
        pList = pjoin(self.processPath, 'jdays')
-        lonLat = pjoin(self.processPath, 'init_lon_lat')
+        lonLat = pjoin(self.processPath, 'origin_lon_lat')
        kde = KDEParameters.KDEParameters(self.kdeType)
        #kde.generateGenesisDateCDF(jdays, lonLat, bw=14,
        #                           genesisKDE=pjoin(self.processPath,
4 changes: 4 additions & 0 deletions Utilities/config.py
@@ -12,6 +12,8 @@

import io
from configparser import RawConfigParser
import os.path

#from ast import literal_eval as eval

def parseBool(txt):
@@ -273,6 +275,8 @@ def read(self, filename):
            return
        if self.readonce:
            return
        if not os.path.exists(filename):
            raise ValueError("config file does not exist: {}".format(filename))
        RawConfigParser.read(self, filename)
        self.readonce = True
2 changes: 1 addition & 1 deletion Utilities/nctools.py
@@ -130,7 +130,7 @@ def ncGetData(ncobj, var):
    try:
        varobj = ncobj.variables[var]
    except KeyError:
-        logger.exception(f"{ncobj.filepath()} does not contain variable {name}")
+        logger.exception(f"{ncobj.filepath()} does not contain variable {var}")
        raise

# Automatic conversion of masked values and unpacking
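
For context, a minimal usage sketch of ncGetData after the fix (file and variable names are hypothetical):

    from netCDF4 import Dataset
    from Utilities.nctools import ncGetData

    ncobj = Dataset('gust.nc', 'r')
    try:
        data = ncGetData(ncobj, 'vmax')
    except KeyError:
        # The log line now interpolates the requested name ({var}); the old
        # f-string referenced {name}, which appears undefined in this scope
        # and would itself have raised a NameError inside the handler.
        raise
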
69 changes: 47 additions & 22 deletions Utilities/timeseries.py
@@ -11,11 +11,11 @@
"""

import logging
-import numpy as np

from os.path import join as pjoin

from configparser import NoOptionError

+import numpy as np

from Utilities.config import ConfigParser
from Utilities.files import flLoadFile
from Utilities.maputils import find_index
@@ -31,14 +31,14 @@
OUTPUT_NAMES = ('Station', 'Time', 'Longitude', 'Latitude',
                'Speed', 'UU', 'VV', 'Bearing',
                'Pressure')
OUTPUT_TYPES = ['|U16', '|U16', 'f8', 'f8', 'f8', 'f8', 'f8', 'f8', 'f8']
OUTPUT_FMT = ['%s', '%s', '%9.5f', '%9.5f',
              '%6.2f', '%6.2f', '%6.2f', '%6.2f',
              '%7.2f']

MINMAX_NAMES = ('Station', 'Time', 'Longitude', 'Latitude',
                'Speed', 'UU', 'VV', 'Bearing', 'Pressure')
MINMAX_TYPES = ['|U16', '|U16', 'f8', 'f8', 'f8', 'f8', 'f8', 'f8', 'f8']
MINMAX_FMT = ['%s', '%s', '%9.5f', '%9.5f',
              '%6.2f', '%6.2f', '%6.2f', '%6.2f',
              '%7.2f']
@@ -49,6 +49,21 @@
"""

class Station(object):
    """Station:

    Description: An object to represent a location for which time series
    data will be extracted.

    Members:
    `id`: Unique id string for the station
    `lon`: Longitude of the station (geographic coordinates)
    `lat`: Latitude of the station (geographic coordinates)
    `data`: A `DynamicRecArray` to hold the time series data

    Methods:
    `insideGrid`: Determine if the station is inside the simulation domain.

    """

    def __init__(self, stationid, longitude, latitude):

        self.id = stationid
@@ -61,8 +76,7 @@ def __getattr__(self, key):
"""
Get the `key` from the `data` object.
:type key: str
:param key: the key to lookup in the `data` object.
:param str key: the key to lookup in the `data` object.
"""
if key.startswith('__') and key.endswith('__'):
return super(Station, self).__getattr__(key)
@@ -84,12 +98,24 @@ def insideGrid(self, gridx, gridy):
class Timeseries(object):
    """Timeseries:

-    Description:
+    Description: Extract data at a set of :class:`Station`s

    Parameters:
    :param str configFile: Path to a TCRM configuration file

    Members:
    `meta`: Boolean indicating whether additional metadata is attached to the `Station`s
    `outputPath`: Directory where extracted data will be stored, in csv-format files
    `minfile`: Name of the file where minima for all `Station`s will be stored.
               This will be in the `outputPath` folder
    `maxfile`: As above, but for maxima (e.g. maximum wind speeds)
    `stations`: A list of `Station` objects, read from a file containing details of the stations

    Methods:

    Internal methods:

    """

def __init__(self, configFile):
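
An end-to-end sketch of driving Timeseries (config path, grid, and field values are hypothetical; extract's signature appears in the hunk further below, and the 2-D field arrays are assumed to be shaped (len(gridy), len(gridx))):

    import numpy as np
    from Utilities.timeseries import Timeseries

    ts = Timeseries('tcrm.ini')              # hypothetical config file
    gridx = np.linspace(148.0, 152.0, 401)   # simulation grid longitudes
    gridy = np.linspace(-24.0, -20.0, 401)   # simulation grid latitudes

    # One synthetic timestep of 2-D fields
    shape = (gridy.size, gridx.size)
    spd = np.full(shape, 35.0)               # gust wind speed
    uu = np.full(shape, -20.0)               # eastward wind component
    vv = np.full(shape, 28.7)                # northward wind component
    prs = np.full(shape, 98000.0)            # surface pressure

    ts.extract('2020-06-17 00:00', spd, uu, vv, prs, gridx, gridy)
    ts.shutdown()   # writes per-station series plus maxima/minima CSVs
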
Expand All @@ -107,16 +133,16 @@ def __init__(self, configFile):

        stnFile = config.get('Timeseries', 'LocationFile')
        self.outputPath = pjoin(config.get('Output', 'Path'),
                                'process', 'timeseries')

        self.maxfile = pjoin(config.get('Output', 'Path'),
                             'process', 'maxima.csv')
        self.minfile = pjoin(config.get('Output', 'Path'),
                             'process', 'minima.csv')


-        log.info("Loading timeseries stations from %s"%stnFile)
-        log.debug("Timeseries data will be written into %s"%self.outputPath)
+        log.info(f"Loading timeseries stations from {stnFile}")
+        log.debug(f"Timeseries data will be written into {self.outputPath}")
self.stations = []
if stnFile.endswith("shp"):
try:
Expand Down Expand Up @@ -145,8 +171,8 @@ def __init__(self, configFile):
stnlat = stndata[:, 2].astype(float)
for sid, lon, lat in zip(stnid, stnlon, stnlat):
self.stations.append(Station(sid, lon, lat))
log.info("There are {0} stations that will collect timeseries data".format(len(self.stations)))
log.info(f"There are {len(self.stations)} stations that will collect timeseries data")

def sample(self, lon, lat, spd, uu, vv, prs, gridx, gridy):
"""
Extract values from 2-dimensional grids at the given lat/lon.
@@ -195,14 +221,14 @@ def extract(self, dt, spd, uu, vv, prs, gridx, gridy):
            if stn.insideGrid(gridx, gridy):
                stns += 1
                result = self.sample(stn.lon, stn.lat, spd, uu, vv, prs,
                                     gridx, gridy)
                ss, ux, vy, bb, pp = result
                stn.data.append((str(stn.id), dt, stn.lon, stn.lat, ss,
                                 ux, vy, bb, pp))

            else:
                stn.data.append((str(stn.id), dt, stn.lon, stn.lat, 0.0, 0.0,
                                 0.0, 0.0, prs[0, 0]))
        log.debug("Extracted data for {0} stations".format(stns))

def shutdown(self):
@@ -212,7 +238,7 @@ def shutdown(self):

        header = 'Station,Time,Longitude,Latitude,Speed,UU,VV,Bearing,Pressure'
        maxheader = ('Station,Time,Longitude,Latitude,Speed,'
                     'UU,VV,Bearing,Pressure')

        max_data = DynamicRecArray(dtype={'names': MINMAX_NAMES,
                                          'formats':MINMAX_TYPES})
@@ -236,9 +262,9 @@ def shutdown(self):


        np.savetxt(self.maxfile, max_data.data, fmt=MINMAX_FMT, delimiter=',',
                   header=maxheader, comments='')
        np.savetxt(self.minfile, min_data.data, fmt=MINMAX_FMT, delimiter=',',
                   header=maxheader, comments='')
"""
for stn in self.stations:
if type(self.maxdata[stn.id][3]) == datetime.datetime:
Expand Down Expand Up @@ -272,4 +298,3 @@ def shutdown(self):
# '%6.2f','%6.2f','%7.2f'] )
"""
log.info("Station data written to file")
