
Commit

Merge remote-tracking branch 'upstream/master' into simstore-simplify
dwhswenson committed Dec 23, 2020
2 parents 4267456 + 9495b54 commit 8a39b66
Showing 27 changed files with 805 additions and 321 deletions.
12 changes: 11 additions & 1 deletion openpathsampling/engines/features/kinetics.py
@@ -7,6 +7,16 @@

dimensions = ['n_atoms', 'n_spatial']

_vel_unit = "simtk(unit.nanometer/unit.picosecond)"
_vel_str = "ndarray.float32({n_atoms},{n_spatial})"
schema_entries = [
('kinetics', [
('velocities', _vel_unit + "*" + _vel_str),
('engine', 'uuid'),
]),
('is_reversed', 'bool'),
]


def netcdfplus_init(store):
kinetic_store = KineticContainerStore()
@@ -50,7 +60,7 @@ def velocities(self):
@velocities.setter
def velocities(self, value):
if value is not None:
kc = KineticContainer(velocities=value)
kc = KineticContainer(velocities=value, engine=self.engine)
else:
kc = None

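For reference, the new schema entry above builds the velocities type string by simple concatenation of the two module-level strings added in this hunk. A minimal sketch of what SimStore ends up seeing:

_vel_unit = "simtk(unit.nanometer/unit.picosecond)"
_vel_str = "ndarray.float32({n_atoms},{n_spatial})"
# concatenation as in the schema entry above
velocities_type = _vel_unit + "*" + _vel_str
# -> "simtk(unit.nanometer/unit.picosecond)*ndarray.float32({n_atoms},{n_spatial})"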
77 changes: 17 additions & 60 deletions openpathsampling/engines/features/shared.py
@@ -27,6 +27,7 @@ def unmask_quantity(quantity):
return quantity
return np.array(quantity.value_in_unit(q_unit)) * q_unit


# =============================================================================
# SIMULATION CONFIGURATION
# =============================================================================
@@ -36,53 +37,24 @@ class StaticContainer(StorableObject):
Simulation configuration. Contains only the coordinates, the associated
box vectors, and the potential energy.
Attributes
Parameters
----------
coordinates : simtk.unit.Quantity wrapping Nx3 np array of dimension length
atomic coordinates
box_vectors : periodic box vectors
the periodic box vectors
engine : :class:`.DynamicsEngine`
the engine that created this data
"""

# Class variables to store the global storage and the system context
# describing the system to be saved as configuration_indices

def __init__(self, coordinates, box_vectors):
"""
Create a simulation configuration from either an OpenMM context or
individually-specified components.
Parameters
----------
coordinates
box_vectors
"""

def __init__(self, coordinates, box_vectors, engine=None):
super(StaticContainer, self).__init__()

self.coordinates = copy.deepcopy(coordinates)
self.box_vectors = copy.deepcopy(box_vectors)

# if self.coordinates is not None:
# # Check for nans in coordinates, and raise an exception if
# # something is wrong.
# if type(self.coordinates) is unit.Quantity:
# coords = self.coordinates._value
# else:
# coords = self.coordinates
#
# if np.any(np.isnan(coords)):
# bad_atoms = [i for i in range(len(coords))
# if np.any(np.isnan(coords[i]))]
# raise ValueError("Coordinates went 'nan' for atoms: " +
# str(bad_atoms))

return

# =========================================================================
# Comparison functions
# =========================================================================
self.engine = engine

@property
def n_atoms(self):
@@ -91,10 +63,6 @@ def n_atoms(self):
"""
return self.coordinates.shape[0]

# =========================================================================
# Utility functions
# =========================================================================

def copy(self):
"""
Returns a deep copy of the instance itself using a subset of coordinates.
@@ -108,10 +76,11 @@ def copy(self):
"""

return StaticContainer(coordinates=self.coordinates,
box_vectors=self.box_vectors
)
box_vectors=self.box_vectors,
engine=self.engine)

def to_dict(self):
# note: to_dict not used here in SimStore, so no need to change
return {
'coordinates': self.coordinates,
'box_vectors': self.box_vectors
@@ -149,7 +118,8 @@ def _load(self, idx):
if not np.count_nonzero(box_vectors):
box_vectors = None

configuration = StaticContainer(coordinates=coordinates, box_vectors=box_vectors)
configuration = StaticContainer(coordinates=coordinates,
box_vectors=box_vectors)

return configuration

@@ -214,26 +184,14 @@ class KineticContainer(StorableObject):
----------
velocities : simtk.unit.Quantity wrapping Nx3 np array of dimension length
atomic velocities
engine : :class:`.DynamicsEngine`
the engine that created this data
"""

def __init__(self, velocities):
"""
Create a simulation momentum from either an OpenMM context or
individually-specified components.
Parameters
----------
velocities
"""

def __init__(self, velocities, engine=None):
super(KineticContainer, self).__init__()

self.velocities = copy.deepcopy(velocities)

# =========================================================================
# Utility functions
# =========================================================================
self.engine = engine

def copy(self):
"""
@@ -245,9 +203,8 @@ def copy(self):
Momentum()
the shallow copy
"""

this = KineticContainer(velocities=self.velocities)

this = KineticContainer(velocities=self.velocities,
engine=self.engine)
return this

def to_dict(self):
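The net effect of the shared.py changes is that both containers now carry a reference to the engine, and copy() preserves it. A minimal usage sketch, assuming coords, box, and vel are simtk Quantities and engine is a DynamicsEngine instance (all hypothetical names):

# hypothetical inputs: simtk Quantities plus a DynamicsEngine instance
sc = StaticContainer(coordinates=coords, box_vectors=box, engine=engine)
kc = KineticContainer(velocities=vel, engine=engine)
assert sc.copy().engine is engine   # copy() now forwards the engine
assert kc.copy().engine is engine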
27 changes: 21 additions & 6 deletions openpathsampling/engines/features/statics.py
@@ -10,10 +10,21 @@

dimensions = ['n_atoms', 'n_spatial']

schema_entries = [(
'statics', [('coordinates', 'ndarray.float32({n_atoms},{n_spatial})'),
('box_vectors', 'ndarray.float32({n_spatial},{n_spatial})')]
)]
_length_unit = "simtk(unit.nanometer)"
_array32 = "ndarray.float32"
schema_entries = [
('statics', [
('coordinates',
'{length_unit}*{array32}({{n_atoms}},{{n_spatial}})'.format(
length_unit=_length_unit, array32=_array32
)),
('box_vectors',
'{length_unit}*{array32}({{n_spatial}},{{n_spatial}})'.format(
length_unit=_length_unit, array32=_array32
)),
('engine', 'uuid'),
]),
]


def netcdfplus_init(store):
@@ -52,7 +63,9 @@ def coordinates(snapshot):
@coordinates.setter
def coordinates(self, value):
if value is not None:
sc = StaticContainer(coordinates=value, box_vectors=self.box_vectors)
sc = StaticContainer(coordinates=value,
box_vectors=self.box_vectors,
engine=self.engine)
else:
sc = None

@@ -77,7 +90,9 @@ def box_vectors(snapshot):
@box_vectors.setter
def box_vectors(self, value):
if value is not None:
sc = StaticContainer(box_vectors=value, coordinates=self.coordinates)
sc = StaticContainer(box_vectors=value,
coordinates=self.coordinates,
engine=self.engine)
else:
sc = None

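As a sanity check on the escaped braces in the new statics schema, here is the coordinates entry after .format() runs; the doubled braces survive as the literal {n_atoms}/{n_spatial} placeholders that SimStore fills in later:

_length_unit = "simtk(unit.nanometer)"
_array32 = "ndarray.float32"
coords_type = '{length_unit}*{array32}({{n_atoms}},{{n_spatial}})'.format(
    length_unit=_length_unit, array32=_array32
)
# -> "simtk(unit.nanometer)*ndarray.float32({n_atoms},{n_spatial})"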
9 changes: 7 additions & 2 deletions openpathsampling/engines/openmm/snapshot.py
@@ -93,10 +93,15 @@ def construct(
if statics is None:
statics = Snapshot.StaticContainer(
coordinates=coordinates,
box_vectors=box_vectors)
box_vectors=box_vectors,
engine=engine
)

if kinetics is None:
kinetics = Snapshot.KineticContainer(velocities=velocities)
kinetics = Snapshot.KineticContainer(
velocities=velocities,
engine=engine
)

return Snapshot(
engine=engine,
6 changes: 4 additions & 2 deletions openpathsampling/engines/openmm/tools.py
@@ -256,9 +256,11 @@ def trajectory_from_mdtraj(mdtrajectory, simple_topology=False,

statics = Snapshot.StaticContainer(
coordinates=coord,
box_vectors=box_v
box_vectors=box_v,
engine=engine
)
kinetics = Snapshot.KineticContainer(velocities=vel)
kinetics = Snapshot.KineticContainer(velocities=vel,
engine=engine)

snap = Snapshot(
statics=statics,
144 changes: 144 additions & 0 deletions openpathsampling/experimental/simstore/attribute_handlers.py
@@ -0,0 +1,144 @@
from .my_types import parse_ndarray_type
import numpy as np


class AttributeHandler(object):
"""Abstract object to handler a given attribute type.
Each attribute in a schema entry will generate an instance of this for
the appropriate type. Different subclasses handle different types that
are supported by SimStore.
Note that you'll usually use this with the :meth:`.from_type_string`
constructor, which returns None if the type string doesn't match what
this class handles.
"""
def __init__(self, type_info):
self.type_info = type_info

@staticmethod
def is_my_type(type_str):
"""Returns type info (possibly tuple) if true, None if false"""
raise NotImplementedError()

@classmethod
def from_type_string(cls, type_str):
"""Generate attribute handler based on a given type string.
Parameters
----------
type_str: str
the specific type string in a format understood by SimStore
Returns
-------
:class:`.AttributeHandler` or None:
an attribute handler for the given type string, or None if this
class doesn't know how to handle that type
"""
type_info = cls.is_my_type(type_str)
if type_info:
return cls(type_info)

def serialize(self, obj):
"""Serialize the object.
Default serialization just returns the object.
Parameters
----------
obj : Any
object to be serialized
Returns
-------
Any:
version ready to be written to disk; typically str or bytes but
standard types are also allowed (bool, float, int, etc.)
"""
return obj

def deserialize(self, data, caches=None):
"""Deserialize the serialized form.
Parameters
----------
data: Any
bytes to deserialize (usually as str or bytes)
caches: Dict
mapping of identifiers to known objects that might be components
of this object; mainly used when deserializing UUID objects.
"""
return data


class StandardHandler(AttributeHandler):
"""Attribute handler for common standard types
"""
standard_types = ['str', 'int', 'float', 'function']
def __init__(self, type_info):
super().__init__(type_info)
self.backend_type = type_info
self.type_size = None

@classmethod
def is_my_type(cls, type_str):
if type_str in cls.standard_types:
return type_str


# TODO: these have not yet been implemented, but this is how it should all
# work -- everything should be managed through attribute handlers, instead
# of separate functions for serialize/deserialize
class UUIDHandler(StandardHandler):
standard_types = ['uuid', 'lazy']
def serialize(self, obj):
pass

def deserialize(self, data, caches=None):
pass


class ListUUIDHandler(StandardHandler):
standard_types = ['list_uuid']
def serialize(self, obj):
pass

def deserialize(self, data, caches=None):
pass


class JSONObjHandler(StandardHandler):
standard_types = ['json_obj']
def serialize(self, obj):
pass

def deserialize(self, data, caches=None):
pass


class NDArrayHandler(AttributeHandler):
"""Attribute handler for NumPy ndarrays.
Parameters
----------
type_info: Tuple
dtype and shape of the array
"""
def __init__(self, type_info):
super().__init__(type_info)
self.dtype, self.shape = type_info
self.backend_type = 'ndarray'
self.type_size = None # TODO: change this based on dtype/shape

@classmethod
def is_my_type(cls, type_str):
return parse_ndarray_type(type_str)

def serialize(self, obj):
return obj.astype(dtype=self.dtype, copy=False).tostring()

def deserialize(self, data, caches=None):
return np.fromstring(data, dtype=self.dtype).reshape(self.shape)

DEFAULT_HANDLERS = [NDArrayHandler, StandardHandler]
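A short sketch of how the handler dispatch in this new file is meant to be used, assuming parse_ndarray_type('ndarray.float32(3,3)') returns a (dtype, shape) pair such as ('float32', (3, 3)); only classes defined above are used:

import numpy as np

# from_type_string returns a handler instance when the type string matches, else None
handler = NDArrayHandler.from_type_string('ndarray.float32(3,3)')
assert StandardHandler.from_type_string('ndarray.float32(3,3)') is None

arr = np.arange(9, dtype=np.float32).reshape(3, 3)
data = handler.serialize(arr)          # bytes via astype(...).tostring()
restored = handler.deserialize(data)   # np.fromstring(...).reshape(self.shape)
assert np.array_equal(arr, restored)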
