Commit 7e62016: Continued pylint clean up

wcarthur committed Oct 14, 2015
1 parent 9a8dc93 commit 7e62016
Showing 10 changed files with 117 additions and 101 deletions.
27 changes: 21 additions & 6 deletions Evaluate/evaluate.py
@@ -36,7 +36,7 @@
from scipy.stats import scoreatpercentile as percentile
from datetime import datetime

import interpolateTracks
import Evaluate.interpolateTracks as interpolateTracks
from Utilities.files import flConfigFile, flStartLog
from Utilities.config import ConfigParser
from Utilities.loadData import loadTrackFile
@@ -811,6 +811,13 @@ def historic(self):
format(self.historicTrackFile))
return False
else:
lon = []
lat = []

for t in tracks:
#if t.inRegion(self.gridLimit):
lon = np.append(lon, t.Longitude)
lat = np.append(lat, t.Latitude)
self.hist, x, y = self.calc2DHistogram(lon, lat)

return True
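The new block above gathers every track point's longitude and latitude before handing them to self.calc2DHistogram. That method is not part of this diff; a minimal sketch of the same idea using numpy.histogram2d, with made-up bin edges standing in for whatever the class actually uses, might look like this:

    import numpy as np

    def calc_2d_histogram(lon, lat, lon_bins, lat_bins):
        """Bin track positions onto a lon/lat grid.

        lon, lat are 1-D arrays of track point coordinates; lon_bins and
        lat_bins are bin edges (assumed here, not taken from the diff).
        """
        hist, x_edges, y_edges = np.histogram2d(lon, lat, [lon_bins, lat_bins])
        return hist, x_edges, y_edges

    # Example: a handful of points counted onto a 0.5-degree grid
    lon = np.random.uniform(90., 180., 10)
    lat = np.random.uniform(-30., 0., 10)
    hist, x, y = calc_2d_histogram(lon, lat,
                                   np.arange(90., 180.1, 0.5),
                                   np.arange(-30., 0.1, 0.5))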
@@ -832,6 +839,13 @@ def synthetic(self):
format(trackFile))
return False
else:
lon = []
lat = []

for t in tracks:
#if t.inRegion(self.gridLimit):
lon = np.append(lon, t.Longitude)
lat = np.append(lat, t.Latitude)
self.synHist[n, :, :], x, y = self.calc2DHistogram(lon, lat)

return True
@@ -1048,6 +1062,8 @@ def findCrossings(self, tracks):
"""
Given a series of track points and a longitude, calculate
if the tracks intersect that line of longitude
:param tracks: list of `Track` objects.
"""

h = np.zeros((len(self.gateLats) - 1, len(self.gateLons)))
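The updated docstring says findCrossings counts where tracks cross fixed lines of longitude (the self.gateLons gates), but the body of the method is not included in this hunk. A hedged sketch of one way to detect crossings of a single gate from a track's longitudes, ignoring the latitude binning the real method also does:

    import numpy as np

    def count_gate_crossings(track_lons, gate_lon):
        """Count crossings of one line of longitude by a single track.

        Returns (total, east_to_west, west_to_east). Illustrative only;
        the real findCrossings also bins crossings by latitude gate.
        """
        # The sign of (longitude - gate) flips whenever the track steps
        # across the gate between consecutive points.
        side = np.sign(np.asarray(track_lons) - gate_lon)
        steps = np.diff(side)
        west_to_east = np.sum(steps > 0)
        east_to_west = np.sum(steps < 0)
        return west_to_east + east_to_west, east_to_west, west_to_east

    # A track wandering back and forth across 150E crosses it three times:
    total, ew, we = count_gate_crossings([148., 151., 149., 152.], 150.)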
@@ -1102,7 +1118,7 @@ def historic(self):
self.timeStep)

self.lonCrossingHist, self.lonCrossingEWHist, \
self.lonCrossingWEHist = self.findCrossings(i, lon, lat)
self.lonCrossingWEHist = self.findCrossings(tracks)

return

@@ -1124,16 +1140,15 @@ def synthetic(self):
return False
else:
self.lonCrossingSyn[n, :], self.lonCrossingSynEW[n, :], \
self.lonCrossingSynWE[
n, :] = self.findCrossings(i, lon, lat)
self.lonCrossingSynWE[n, :] = self.findCrossings(tracks)

return True

def synStats(self):
"""Calculate statistics of synthetic event sets"""

log.debug(
"Calculating statistics for longitude crossings of synthetic events")
log.debug(("Calculating statistics for longitude "
"crossings of synthetic events"))
if not hasattr(self, 'lonCrossingSyn'):
log.critical("Synthetic event sets have not been processed!")
log.critical("Cannot calculate statistics")
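The reflowed log.debug call in synStats above only re-wraps the message to satisfy pylint's line-length check: adjacent string literals inside parentheses are concatenated at compile time, so the emitted message is unchanged. A trivial illustration (not project code):

    import logging

    log = logging.getLogger(__name__)

    # These two calls emit exactly the same message; the second simply
    # keeps every source line under the line-length limit pylint enforces.
    log.debug("Calculating statistics for longitude crossings of synthetic events")
    log.debug(("Calculating statistics for longitude "
               "crossings of synthetic events"))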
55 changes: 28 additions & 27 deletions Evaluate/genesisDensity.py
@@ -29,16 +29,10 @@
from Utilities.parallel import attemptParallel, disableOnWorkers
from Utilities import pathLocator

import Utilities.Intersections as Int

from shapely.geometry import Point, LineString, Polygon
from statsmodels.nonparametric.kernel_density import KDEMultivariate

from PlotInterface.maps import FilledContourMapFigure, saveFigure, levels
from PlotInterface.tracks import TrackMapFigure

log = logging.getLogger(__name__)
log.addHandler(logging.NullHandler())
LOG = logging.getLogger(__name__)
LOG.addHandler(logging.NullHandler())
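Much of the churn in this file is the module-level logger being renamed from log to LOG: pylint's invalid-name check treats module-level names as constants and expects UPPER_CASE, and attaching a NullHandler keeps a library module quiet unless the application configures logging. A minimal sketch of the convention in an unrelated module:

    import logging

    # Module-level names are constants to pylint, hence UPPER_CASE.
    LOG = logging.getLogger(__name__)
    LOG.addHandler(logging.NullHandler())  # silent unless the app sets up logging

    def do_work():
        LOG.info("visible once the application calls logging.basicConfig() or similar")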

def loadTracks(trackfile):
"""
@@ -69,10 +63,17 @@ def loadTracksFromFiles(trackfiles):
yield track

def loadTracksFromPath(path):
"""
Load a collection of track files from a given path.
:param str path: Path to load track files from.
:returns: iterator of :class:`Track` objects.
"""
files = os.listdir(path)
trackfiles = [pjoin(path, f) for f in files if f.startswith('tracks')]
msg = 'Processing %d track files in %s' % (len(trackfiles), path)
log.info(msg)
LOG.info(msg)
return loadTracksFromFiles(sorted(trackfiles))
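The new docstring above spells out that loadTracksFromPath returns an iterator of Track objects (loadTracksFromFiles is a generator). A hedged usage sketch; the directory name is invented, and the Longitude attribute is only assumed from how the rest of this diff uses tracks:

    from Evaluate.genesisDensity import loadTracksFromPath

    # Hypothetical path; each yielded Track carries per-point arrays
    # such as Longitude and Latitude.
    for track in loadTracksFromPath('/data/output/tracks'):
        print(len(track.Longitude))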

class GenesisDensity(object):
@@ -130,7 +131,7 @@ def calculate(self, tracks):
histogram, x, y = np.histogram2d(lon, lat,
[self.lon_range,
self.lat_range],
normed=False)
normed=False)
return histogram

def calculatePDF(self, tracks):
@@ -152,7 +153,7 @@ def calculatePDF(self, tracks):
lat = np.append(lat, t.Latitude)

xy = np.vstack([self.X.ravel(), self.Y.ravel()])
data = np.array([[lon],[lat]])
data = np.array([[lon], [lat]])

kde = KDEMultivariate(data, bw='cv_ml', var_type='cc')
pdf = kde.pdf(data_predict=xy)
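calculatePDF feeds the track positions into a statsmodels multivariate kernel density estimate, using cross-validated maximum-likelihood bandwidths ('cv_ml') for two continuous variables ('cc'). A self-contained sketch of the same call pattern on synthetic data; the point counts, domain, and grid spacing are assumptions, not the class's own attributes:

    import numpy as np
    from statsmodels.nonparametric.kernel_density import KDEMultivariate

    # Fake genesis positions: 200 (lon, lat) pairs
    lon = np.random.uniform(100., 160., 200)
    lat = np.random.uniform(-25., -5., 200)

    # KDEMultivariate accepts an (nobs, k_vars) array; 'cc' marks both
    # variables as continuous, 'cv_ml' picks bandwidths by maximum
    # likelihood cross-validation.
    kde = KDEMultivariate(np.column_stack([lon, lat]), bw='cv_ml', var_type='cc')

    # Evaluate the density on a coarse grid of query points
    X, Y = np.meshgrid(np.arange(100., 161., 2.), np.arange(-25., -4., 2.))
    pdf = kde.pdf(data_predict=np.column_stack([X.ravel(), Y.ravel()]))
    density = pdf.reshape(X.shape)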
@@ -165,12 +166,12 @@ def _calculate(self, tracks):
:param tracks: Collection of :class:`Track` objects.
"""
log.debug("Calculating PDF for set of {0:d} tracks".format(len(tracks)))
LOG.debug("Calculating PDF for set of {0:d} tracks".format(len(tracks)))

hist = ma.zeros((len(self.lon_range) - 1,
len(self.lat_range) - 1))

xy= np.vstack([self.X.ravel(), self.Y.ravel()])
xy = np.vstack([self.X.ravel(), self.Y.ravel()])

x = []
y = []
Expand All @@ -188,9 +189,9 @@ def _calculate(self, tracks):
xx = np.array(x)
yy = np.array(y)
ii = np.where((xx >= self.gridLimit['xMin']) &
(xx <= self.gridLimit['xMax']) &
(xx <= self.gridLimit['xMax']) &
(yy >= self.gridLimit['yMin']) &
(yy <= self.gridLimit['yMax']))
(yy <= self.gridLimit['yMax']))

values = np.vstack([xx[ii], yy[ii]])
kernel = KDEMultivariate(values, bw='cv_ml', var_type='cc')
Expand All @@ -215,7 +216,7 @@ def calculateMeans(self):
@disableOnWorkers
def historic(self):
"""Load historic data and calculate histogram"""
log.info("Processing historic track records")
LOG.info("Processing historic track records")
config = ConfigParser()
config.read(self.configFile)
inputFile = config.get('DataProcess', 'InputFile')
@@ -228,7 +229,7 @@ def historic(self):
tracks = loadTrackFile(self.configFile, inputFile, source)

except (TypeError, IOError, ValueError):
log.critical("Cannot load historical track file: {0}".\
LOG.critical("Cannot load historical track file: {0}".\
format(inputFile))
raise
else:
Expand All @@ -238,7 +239,7 @@ def historic(self):
startYr = min(startYr, min(t.Year))
endYr = max(endYr, max(t.Year))
numYears = endYr - startYr
log.info("Range of years: %d - %d" % (startYr, endYr))
LOG.info("Range of years: %d - %d" % (startYr, endYr))
self.hist = self._calculate(tracks)
#self.hist = self._calculate(tracks) / numYears

@@ -260,12 +261,12 @@ def synthetic(self):
n = 0
for d in range(1, pp.size()):
pp.send(trackfiles[w], destination=d, tag=work_tag)
log.debug("Processing track file {0:d} of {1:d}".\
LOG.debug("Processing track file {0:d} of {1:d}".\
format(w, len(trackfiles)))
w += 1

terminated = 0
while (terminated < pp.size() - 1):
while terminated < pp.size() - 1:
results, status = pp.receive(pp.any_source, tag=result_tag,
return_status=True)
self.synHist[n, :, :] = results
Expand All @@ -274,7 +275,7 @@ def synthetic(self):
d = status.source
if w < len(trackfiles):
pp.send(trackfiles[w], destination=d, tag=work_tag)
log.debug("Processing track file {0:d} of {1:d}".\
LOG.debug("Processing track file {0:d} of {1:d}".\
format(w, len(trackfiles)))
w += 1
else:
Expand All @@ -284,19 +285,19 @@ def synthetic(self):
self.calculateMeans()

elif (pp.size() > 1) and (pp.rank() != 0):
while(True):
while True:
trackfile = pp.receive(source=0, tag=work_tag)
if trackfile is None:
break

log.debug("Processing %s" % (trackfile))
LOG.debug("Processing %s", trackfile)
tracks = loadTracks(trackfile)
results = self._calculate(tracks) #/ self.synNumYears
pp.send(results, destination=0, tag=result_tag)

elif (pp.size() == 1) and (pp.rank() == 0):
for n, trackfile in enumerate(trackfiles):
log.debug("Processing track file {0:d} of {1:d}".\
LOG.debug("Processing track file {0:d} of {1:d}".\
format(n + 1, len(trackfiles)))
tracks = loadTracks(trackfile)
self.synHist[n, :, :] = self._calculate(tracks) #/ self.synNumYears
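synthetic() above farms the track files out master/worker style: rank 0 sends a file name to each worker (work_tag), receives a histogram back (result_tag), keeps handing out files to whichever worker just finished, and finally sends None as a shutdown sentinel. A stripped-down sketch of that pattern, assuming pp is the pypar-style communicator used in this code (same send/receive/size/rank calls as above) and that there are at least as many track files as workers:

    WORK_TAG, RESULT_TAG = 0, 1

    def master(pp, trackfiles, process):
        """Rank 0: hand out one file per worker, gather results, shut down."""
        results, w = [], 0
        for d in range(1, pp.size()):           # prime every worker
            pp.send(trackfiles[w], destination=d, tag=WORK_TAG)
            w += 1
        terminated = 0
        while terminated < pp.size() - 1:
            result, status = pp.receive(pp.any_source, tag=RESULT_TAG,
                                        return_status=True)
            results.append(result)
            if w < len(trackfiles):             # more work: reuse this worker
                pp.send(trackfiles[w], destination=status.source, tag=WORK_TAG)
                w += 1
            else:                               # no more work: send sentinel
                pp.send(None, destination=status.source, tag=WORK_TAG)
                terminated += 1
        return results

    def worker(pp, process):
        """Ranks > 0: process files until the None sentinel arrives."""
        while True:
            trackfile = pp.receive(source=0, tag=WORK_TAG)
            if trackfile is None:
                break
            pp.send(process(trackfile), destination=0, tag=RESULT_TAG)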
@@ -309,12 +310,12 @@ def save(self):

# Simple sanity check (should also include the synthetic data):
if not hasattr(self, 'hist'):
log.critical("No historical data available!")
log.critical(("Check that data has been processed "
LOG.critical("No historical data available!")
LOG.critical(("Check that data has been processed "
"before trying to save data"))
return

log.info('Saving genesis density data to {0}'.format(dataFile))
LOG.info('Saving genesis density data to {0}'.format(dataFile))
dimensions = {
0: {
'name': 'lat',
26 changes: 13 additions & 13 deletions Evaluate/landfallRates.py
@@ -30,8 +30,8 @@

from PlotInterface.curves import RangeCompareCurve, saveFigure

log = logging.getLogger(__name__)
log.addHandler(logging.NullHandler())
LOG = logging.getLogger(__name__)
LOG.addHandler(logging.NullHandler())

def loadTracks(trackfile):
"""
@@ -70,8 +70,8 @@ def __init__(self, configFile):
try:
gateFile = config.get('Input', 'CoastlineGates')
except NoOptionError:
log.exception(("No coastline gate file specified "
"in configuration file"))
LOG.exception(("No coastline gate file specified "
"in configuration file"))
raise

gateData = np.genfromtxt(gateFile, delimiter=',')
Expand Down Expand Up @@ -174,7 +174,7 @@ def setOutput(self, ntracks):
def historic(self):
"""Calculate historical rates of landfall"""

log.info("Processing landfall rates of historical tracks")
LOG.info("Processing landfall rates of historical tracks")
config = ConfigParser()
config.read(self.configFile)
inputFile = config.get('DataProcess', 'InputFile')
@@ -187,7 +187,7 @@ def historic(self):
try:
tracks = loadTrackFile(self.configFile, inputFile, source)
except (TypeError, IOError, ValueError):
log.critical("Cannot load historical track file: {0}".\
LOG.critical("Cannot load historical track file: {0}".\
format(inputFile))
raise
else:
Expand All @@ -198,7 +198,7 @@ def historic(self):

def synthetic(self):
"""Load synthetic data and calculate histogram"""
log.info("Processing landfall rates of synthetic events")
LOG.info("Processing landfall rates of synthetic events")

work_tag = 0
result_tag = 1
@@ -214,12 +214,12 @@ def synthetic(self):
n = 0
for d in range(1, pp.size()):
pp.send(trackfiles[w], destination=d, tag=work_tag)
log.debug("Processing track file {0:d} of {1:d}".\
LOG.debug("Processing track file {0:d} of {1:d}".\
format(w + 1, len(trackfiles)))
w += 1

terminated = 0
while (terminated < pp.size() - 1):
while terminated < pp.size() - 1:
results, status = pp.receive(pp.any_source, tag=result_tag,
return_status=True)

Expand All @@ -230,7 +230,7 @@ def synthetic(self):

if w < len(trackfiles):
pp.send(trackfiles[w], destination=d, tag=work_tag)
log.debug("Processing track file {0:d} of {1:d}".\
LOG.debug("Processing track file {0:d} of {1:d}".\
format(w + 1, len(trackfiles)))
w += 1
else:
Expand All @@ -240,20 +240,20 @@ def synthetic(self):
self.calculateStats()

elif (pp.size() > 1) and (pp.rank() != 0):
while(True):
while True:
trackfile = pp.receive(source=0, tag=work_tag)
if trackfile is None:
break

log.debug("Processing %s" % (trackfile))
LOG.debug("Processing %s", trackfile)
tracks = loadTracks(trackfile)
results = self.processTracks(tracks)
pp.send(results, destination=0, tag=result_tag)

elif pp.size() == 1 and pp.rank() == 0:
# Assumed no Pypar - helps avoid the need to extend DummyPypar()
for n, trackfile in enumerate(sorted(trackfiles)):
log.debug("Processing track file {0:d} of {1:d}".\
LOG.debug("Processing track file {0:d} of {1:d}".\
format(n + 1, len(trackfiles)))
tracks = loadTracks(trackfile)
results = self.processTracks(tracks)
[Diffs for the remaining seven changed files are not rendered here.]