Skip to content
This repository has been archived by the owner on Nov 10, 2017. It is now read-only.

Commit

Permalink
Added a warning [skip CI]
Browse files Browse the repository at this point in the history
  • Loading branch information
micheles committed Mar 28, 2017
2 parents b2ba743 + 2534b51 commit 237dbca
Show file tree
Hide file tree
Showing 22 changed files with 251 additions and 98 deletions.
8 changes: 8 additions & 0 deletions debian/changelog
Original file line number Diff line number Diff line change
@@ -1,5 +1,13 @@
[Michele Simionato]
* Experimental support for the Grid Engine
* Fixed the serialization to HDF5 of nonparametric ruptures

[Paolo Tormene]
* Fixed clockwise order of vertices of surface boundaries, as needed for
WKT MULTIPOLYGON strings while exporting ruptures

[Michele Simionato]
* Added input checks to the GmfComputer
* Fixed bug when splitting area sources into point sources for
YoungsCoppersmith1985MFD
* Added a method `hdf5.File.save` to save node dictionaries in HDF5 format
Expand Down
6 changes: 2 additions & 4 deletions doc/implementing-new-gsim.md
Original file line number Diff line number Diff line change
Expand Up @@ -18,14 +18,12 @@ https://github.com/gem/oq-hazardlib/tree/master/openquake/hazardlib/tests/gsim
- When tests are passing, update the forked repository, rerun the tests and, if everything still passes, open a pull request. To run the full suite of tests you must open a terminal and run the commands:

```bash
cd <to_hazardlib_root_directory>
cd <to_hazardlib_root_directory>;
nosetests
```

- Create a new .rst file (needed to generate automatically documentation) in this directory (several examples available):
https://github.com/gem/oq-hazardlib/tree/master/doc/sphinx/gsim

- Check that the new code fulfils PEP8 standards (usually we do this using tools such as pep8 https://pypi.python.org/pypi/pep8)
- Check that the new code fulfils PEP8 standards (usually we do this using tools such as flake8 https://pypi.python.org/pypi/flake8)
http://legacy.python.org/dev/peps/pep-0008/

- Run pylint to check your code.
52 changes: 51 additions & 1 deletion openquake/baselib/hdf5.py
Original file line number Diff line number Diff line change
Expand Up @@ -67,7 +67,7 @@ def extend(dset, array):
dset[length:newlength] = array


def extend3(hdf5path, key, array):
def extend3(hdf5path, key, array, **attrs):
"""
Extend an HDF5 file dataset with the given array
"""
Expand All @@ -78,6 +78,8 @@ def extend3(hdf5path, key, array):
dset = create(h5, key, array.dtype,
shape=(None,) + array.shape[1:])
extend(dset, array)
for key, val in attrs.items():
dset.attrs[key] = val
h5.flush()


Expand Down Expand Up @@ -202,6 +204,43 @@ def dotname2cls(dotname):
return getattr(importlib.import_module(modname), clsname)


def get_nbytes(dset):
    """
    Return the size in bytes of an HDF5 dataset.

    An explicitly stored ``nbytes`` attribute takes precedence; otherwise
    the size is computed from the underlying array. Return None when
    neither is available (i.e. the object is actually a group).
    """
    try:
        return dset.attrs['nbytes']
    except KeyError:
        pass
    if hasattr(dset, 'value'):
        # number of elements times the byte size of a single element
        return dset.size * numpy.zeros(1, dset.dtype).nbytes


class ByteCounter(object):
    """
    A visitor measuring the size in bytes of an HDF5 dataset or group.
    Use it as ByteCounter.get_nbytes(dset_or_group).
    """
    @classmethod
    def get_nbytes(cls, dset):
        """
        :param dset: an HDF5 dataset or group
        :returns: its total size in bytes
        """
        known = get_nbytes(dset)
        if known is not None:
            return known
        # a group: visit its whole subtree and accumulate dataset sizes
        counter = cls()
        dset.visititems(counter)
        return counter.nbytes

    def __init__(self, nbytes=0):
        # running total, updated by each __call__ during the visit
        self.nbytes = nbytes

    def __call__(self, name, dset_or_group):
        size = get_nbytes(dset_or_group)
        if size:
            self.nbytes += size


class File(h5py.File):
"""
Subclass of :class:`h5py.File` able to store and retrieve objects
Expand Down Expand Up @@ -267,6 +306,17 @@ def __getitem__(self, path):
else:
return h5obj

def set_nbytes(self, key, nbytes=None):
    """
    Set the `nbytes` attribute on the HDF5 object identified by `key`.

    :param key: path of an HDF5 dataset or group inside the file
    :param nbytes: size in bytes; if None, it is measured recursively
    :returns: the size that was stored
    """
    obj = super(File, self).__getitem__(key)
    if nbytes is None:
        # not supplied by the caller: measure the object, recursing
        # into groups when needed
        nbytes = ByteCounter.get_nbytes(obj)
    obj.attrs['nbytes'] = nbytes
    return nbytes

def save(self, nodedict, root=''):
"""
Save a node dictionary in the .hdf5 file, starting from the root
Expand Down
9 changes: 6 additions & 3 deletions openquake/baselib/parallel.py
Original file line number Diff line number Diff line change
Expand Up @@ -484,13 +484,15 @@ def monitor(self, operation=None, autoflush=False, measuremem=False):

def run(self, *args, **kw):
"""
Run the computer with the given arguments; one specify extra arguments
`acc` and `Starmap`.
Run the computer with the given arguments; one can specify the extra
arguments `acc` and `Starmap`.
"""
acc = kw.get('acc')
starmap = kw.get('Starmap', Starmap)
wakeup_pool() # if not already started
return starmap(self, self.gen_args(*args)).reduce(self.aggregate, acc)
with self.monitor('complete runtime', measuremem=True, autoflush=True):
return starmap(self, self.gen_args(*args)).reduce(
self.aggregate, acc)


class Starmap(object):
Expand Down Expand Up @@ -661,6 +663,7 @@ def submit_all(self):
return IterResult([fut], self.name)

if self.distribute == 'qsub':
logging.warn('EXPERIMENTAL: sending tasks to the grid engine')
allargs = list(self.task_args)
return IterResult(qsub(self.task_func, allargs),
self.name, len(allargs), self.progress)
Expand Down
21 changes: 8 additions & 13 deletions openquake/hazardlib/calc/gmf.py
Original file line number Diff line number Diff line change
Expand Up @@ -20,8 +20,6 @@
Module :mod:`~openquake.hazardlib.calc.gmf` exports
:func:`ground_motion_fields`.
"""

import collections
import numpy
import scipy.stats

Expand Down Expand Up @@ -78,26 +76,26 @@ class GmfComputer(object):
# attribute. Then the `.compute(gsim, num_events)` method is called and
# a matrix of size (I, N, E) is returned, where I is the number of
# IMTs, N the number of affected sites and E the number of events. The
# seed is extracted from the underlying rupture and salted in such a
# way to produce different numbers even if the method is called twice
# with the same `gsim`. This ensures that different GMPE logic tree
# realizations produce different numbers even in the case of sampling.
# If all GMPEs are different the salt is 0 and the rupture seed is used.
# seed is extracted from the underlying rupture.
def __init__(self, rupture, sites, imts, gsims,
truncation_level=None, correlation_model=None, samples=0):
assert sites, sites
if len(sites) == 0:
raise ValueError('No sites')
elif len(imts) == 0:
raise ValueError('No IMTs')
elif len(gsims) == 0:
raise ValueError('No GSIMs')
self.rupture = rupture
self.sites = sites
self.imts = [from_string(imt) for imt in imts]
self.gsims = sorted(set(gsims))
self.gsims = sorted(gsims)
self.truncation_level = truncation_level
self.correlation_model = correlation_model
self.samples = samples
# `rupture` can be a high level rupture object containing a low
# level hazardlib rupture object as a .rupture attribute
if hasattr(rupture, 'rupture'):
rupture = rupture.rupture
self.salt = collections.Counter() # associate a salt to the gsims
self.ctx = ContextMaker(gsims).make_contexts(sites, rupture)

def compute(self, gsim, num_events, seed=None):
Expand All @@ -111,9 +109,6 @@ def compute(self, gsim, num_events, seed=None):
seed = seed or self.rupture.rupture.seed
except AttributeError:
pass
if hasattr(self, 'salt'): # when called from the engine
seed += self.salt[gsim]
self.salt[gsim] += 1
if seed is not None:
numpy.random.seed(seed)
result = numpy.zeros(
Expand Down
22 changes: 22 additions & 0 deletions openquake/hazardlib/geo/mesh.py
Original file line number Diff line number Diff line change
Expand Up @@ -46,6 +46,28 @@ def build_array(lons_lats_depths):
return arr


def surface_to_mesh(surface):
    """
    :param surface: a Surface object
    :returns: a 3D array of dtype point3d
    """
    if hasattr(surface, 'surfaces'):  # multiplanar surface
        num_planes = len(surface.surfaces)
        corners = [[s.corner_lons, s.corner_lats, s.corner_depths]
                   for s in surface.surfaces]
        return build_array(corners).reshape(num_planes, 2, 2)
    mesh = surface.mesh
    if mesh is None:  # planar surface: a single 2x2 grid of corners
        corners = [[surface.corner_lons, surface.corner_lats,
                    surface.corner_depths]]
        return build_array(corners).reshape(1, 2, 2)
    # general surface: wrap the mesh arrays with a leading unit axis
    return build_array([[mesh.lons, mesh.lats, mesh.depths]]).reshape(
        (1,) + mesh.lons.shape)


class Mesh(object):
"""
Mesh object represent a collection of points and provides the most
Expand Down
6 changes: 3 additions & 3 deletions openquake/hazardlib/geo/surface/planar.py
Original file line number Diff line number Diff line change
Expand Up @@ -660,7 +660,7 @@ def get_middle_point(self):

def get_surface_boundaries(self):
    """
    :returns: a pair ([lons], [lats]) with the corner coordinates in
        WKT-friendly (clockwise) order, with the first corner repeated
        to close the ring
    """
    # NOTE: the superseded counter-clockwise return ([0, 1, 2, 3, 0])
    # left over from the old version has been removed: it made the
    # corrected clockwise return below unreachable.
    # Corner order 0, 1, 3, 2 walks the rectangle clockwise, as needed
    # for WKT MULTIPOLYGON strings when exporting ruptures.
    return [self.corner_lons.take([0, 1, 3, 2, 0])], \
           [self.corner_lats.take([0, 1, 3, 2, 0])]
2 changes: 1 addition & 1 deletion openquake/hazardlib/source/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -17,7 +17,7 @@
Package :mod:`openquake.hazardlib.source` deals with various types
of seismic sources.
"""
from openquake.hazardlib.source.rupture import Rupture, \
from openquake.hazardlib.source.rupture import BaseRupture, \
ParametricProbabilisticRupture, NonParametricProbabilisticRupture
from openquake.hazardlib.source.point import PointSource
from openquake.hazardlib.source.area import AreaSource
Expand Down
Loading

0 comments on commit 237dbca

Please sign in to comment.