This repository has been archived by the owner on Apr 23, 2021. It is now read-only.

Merge pull request #7 from simphony/common-0.5.0
Bumped version to 0.5.0
stefanoborini committed Jan 18, 2017
2 parents 1f91328 + bca859a commit 2cba3bf
Showing 11 changed files with 84 additions and 84 deletions.
2 changes: 1 addition & 1 deletion .travis.yml
@@ -9,7 +9,7 @@ cache:

env:
- SIMPHONY_VERSION=master
-- SIMPHONY_VERSION=0.4.0
+- SIMPHONY_VERSION=0.5.0
matrix:
allow_failures:
- env: SIMPHONY_VERSION=master
3 changes: 1 addition & 2 deletions bench/wrapper_bench.py
@@ -3,7 +3,6 @@
from simphony.engine import liggghts
from simphony.bench.util import bench
from simphony.core.cuba import CUBA
-from simphony.core.cuds_item import CUDSItem

from .util import get_particles
from ..testing.md_example_configurator import MDExampleConfigurator
@@ -80,7 +79,7 @@ def run_test(func, wrapper):
# test different run scenarios
particles = get_particles(y_range)
number_particles = sum(p.count_of(
-        CUDSItem.PARTICLE) for p in particles)
+        CUBA.PARTICLE) for p in particles)
number_time_steps = 10

for test in run_wrapper_tests:
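
Note on the hunk above: the benchmark now counts particles with the CUBA.PARTICLE key, since this PR moves every CUDSItem use over to CUBA. A minimal sketch of the same counting pattern, assuming simphony 0.5.x is installed and using only calls that appear in this diff (the containers stand in for get_particles(y_range); the Particle constructor arguments are assumed from simphony-common, not shown here):

from simphony.core.cuba import CUBA
from simphony.cuds.particles import Particle, Particles

# two throwaway containers standing in for the benchmark's particle sets
particle_sets = [Particles(name="a"), Particles(name="b")]
for pc in particle_sets:
    pc.add([Particle(coordinates=(0.0, 0.0, 0.0))])

# 0.5.0 style: count_of takes a CUBA key instead of CUDSItem.PARTICLE
number_particles = sum(pc.count_of(CUBA.PARTICLE) for pc in particle_sets)
print(number_particles)  # 2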
2 changes: 1 addition & 1 deletion setup.py
@@ -33,6 +33,6 @@ def write_version_py(filename=None):
entry_points={
'simphony.engine': ['liggghts = simliggghts']},
packages=find_packages(),
-    install_requires=["simphony ~= 0.4",
+    install_requires=["simphony>0.4,<0.6",
"pyyaml >= 3.11"]
)
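
The requirement moves from a compatible-release pin to an explicit version window. A quick, hedged way to compare what the two specifiers accept; the packaging library here is an assumption for illustration only, not a dependency of this repo:

from packaging.specifiers import SpecifierSet

old_pin = SpecifierSet("~=0.4")      # compatible release: >= 0.4 and == 0.*
new_pin = SpecifierSet(">0.4,<0.6")  # explicit window around simphony 0.5.x

for version in ("0.4.0", "0.5.0", "0.6.0"):
    print(version, version in old_pin, version in new_pin)
# 0.4.0 True False   -> ">0.4" excludes 0.4.0 itself
# 0.5.0 True True
# 0.6.0 True False   -> the old pin would also have accepted 0.6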
2 changes: 1 addition & 1 deletion simliggghts/internal/liggghts_internal_data_manager.py
@@ -161,7 +161,7 @@ def _handle_new_particles(self, uname, particles):
if CUBA.MATERIAL_TYPE not in self._pc_data[uname]:
raise ValueError("Missing the required CUBA.MATERIAL_TYPE")

-        self._add_atoms(iterable=particles.iter_particles(),
+        self._add_atoms(iterable=particles.iter(item_type=CUBA.PARTICLE),
uname=uname,
safe=True)

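
The internal data manager now hands _add_atoms the generic iter() call with an item_type filter instead of the removed iter_particles(). A minimal sketch of the container side of that contract, assuming simphony 0.5.x (the container and the material value are made up for illustration):

from simphony.core.cuba import CUBA
from simphony.core.data_container import DataContainer
from simphony.cuds.particles import Particle, Particles

pc = Particles(name="atoms")
# the wrapper above insists on CUBA.MATERIAL_TYPE in the container data
pc.data = DataContainer({CUBA.MATERIAL_TYPE: 1})
pc.add([Particle(coordinates=(0.0, 0.0, 0.0))])

# the kind of iterable the new code passes to _add_atoms
for p in pc.iter(item_type=CUBA.PARTICLE):
    print(p.uid, p.coordinates)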
7 changes: 3 additions & 4 deletions simliggghts/io/file_utility.py
@@ -1,7 +1,6 @@
from simphony.cuds.particles import Particle, Particles
from simphony.core.data_container import DataContainer
from simphony.core.cuba import CUBA
-from simphony.core.cuds_item import CUDSItem

from ..common import globals
from .liggghts_data_file_parser import LiggghtsDataFileParser
@@ -93,7 +92,7 @@ def read_data_file(filename, atom_style=None):
atom_type = p.data[CUBA.MATERIAL_TYPE]
del p.data[CUBA.MATERIAL_TYPE]

-        type_to_particles_map[atom_type].add_particles([p])
+        type_to_particles_map[atom_type].add([p])

return type_to_particles_map.values()

@@ -122,7 +121,7 @@ def write_data_file(filename, particles_list, atom_style=AtomStyle.GRANULAR):
"""

num_particles = sum(
-        pc.count_of(CUDSItem.PARTICLE) for pc in particles_list)
+        pc.count_of(CUBA.PARTICLE) for pc in particles_list)
types = set(pc.data[CUBA.MATERIAL_TYPE] for pc in particles_list)

box = get_box([pc.data_extension for pc in particles_list])
@@ -140,7 +139,7 @@ def write_data_file(filename, particles_list, atom_style=AtomStyle.GRANULAR):

for particles in particles_list:
material_type = particles.data[CUBA.MATERIAL_TYPE]
-        for p in particles.iter_particles():
+        for p in particles.iter(item_type=CUBA.PARTICLE):
writer.write_atom(p, material_type)
writer.close()

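
Both entry points in this module keep their signatures; only the container calls change. A hedged usage sketch of read_data_file/write_data_file with the 0.5.0 calls shown above (the file names are hypothetical, and the input file must already exist in LIGGGHTS data format):

from simphony.core.cuba import CUBA
from simliggghts.io.file_utility import read_data_file, write_data_file

# one Particles container comes back per material type found in the file
particles_list = read_data_file("in.data")
for pc in particles_list:
    print(pc.name, pc.count_of(CUBA.PARTICLE))

# atom_style defaults to AtomStyle.GRANULAR per the signature above
write_data_file("out.data", particles_list)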
31 changes: 15 additions & 16 deletions simliggghts/io/liggghts_fileio_data_manager.py
@@ -1,7 +1,6 @@
import os

from simphony.core.cuba import CUBA
-from simphony.core.cuds_item import CUDSItem
from simphony.core.data_container import DataContainer
from simphony.cuds.particles import Particles, Particle

@@ -145,11 +144,11 @@ def _handle_new_particles(self, uname, particles):
pc = Particles(name="_")
pc.data = DataContainer(particles.data)

-        for p in particles.iter_particles():
-            pc.add_particles([p])
+        for p in particles.iter(item_type=CUBA.PARTICLE):
+            pc.add([p])

-        for b in particles.iter_bonds():
-            pc.add_bonds([b])
+        for b in particles.iter(item_type=CUBA.BOND):
+            pc.add([b])

self._pc_cache[uname] = pc

@@ -170,24 +169,24 @@ def get_particle(self, uid, uname):
name of particle container
"""
-        return self._pc_cache[uname].get_particle(uid)
+        return self._pc_cache[uname].get(uid)

def update_particles(self, iterable, uname):
"""Update particles
"""
-        self._pc_cache[uname].update_particles(
+        self._pc_cache[uname].update(
_filter_unsupported_data(iterable, self._supported_cuba))

def add_particles(self, iterable, uname):
"""Add particles
"""
-        uids = self._pc_cache[uname].add_particles(iterable)
+        uids = self._pc_cache[uname].add(iterable)

# filter the cached particles of unsupported CUBA
-        self._pc_cache[uname].update_particles(_filter_unsupported_data(
-            self._pc_cache[uname].iter_particles(uids), self._supported_cuba))
+        self._pc_cache[uname].update(_filter_unsupported_data(
+            self._pc_cache[uname].iter(uids), self._supported_cuba))

return uids

@@ -202,7 +201,7 @@ def remove_particle(self, uid, uname):
name of particle container
"""
-        self._pc_cache[uname].remove_particles([uid])
+        self._pc_cache[uname].remove([uid])

def has_particle(self, uid, uname):
"""Has particle
@@ -215,7 +214,7 @@ def has_particle(self, uid, uname):
name of particle container
"""
-        return self._pc_cache[uname].has_particle(uid)
+        return self._pc_cache[uname].has(uid)

def iter_particles(self, uname, uids=None):
"""Iterate over the particles of a certain type
@@ -227,7 +226,7 @@ def iter_particles(self, uname, uids=None):
uids is None then all particles will be iterated over.
"""
-        return self._pc_cache[uname].iter_particles(uids)
+        return self._pc_cache[uname].iter(uids, item_type=CUBA.PARTICLE)

def number_of_particles(self, uname):
"""Get number of particles in a container
@@ -238,7 +237,7 @@ def number_of_particles(self, uname):
non-changing unique name of particles
"""
-        return self._pc_cache[uname].count_of(CUDSItem.PARTICLE)
+        return self._pc_cache[uname].count_of(CUBA.PARTICLE)

def flush(self, input_data_filename):
"""flush to file
@@ -308,7 +307,7 @@ def _update_from_liggghts(self, output_data_filename):
for liggghts_id, values in atoms.iteritems():
uname, uid = self._liggghtsid_to_uid[liggghts_id]
cache_pc = self._pc_cache[uname]
-            p = cache_pc.get_particle(uid)
+            p = cache_pc.get(uid)
p.coordinates, p.data = interpreter.convert_atom_values(values)
p.data.update(
interpreter.convert_velocity_values(velocities[liggghts_id]))
@@ -337,7 +336,7 @@ def _write_data_file(self, filename):
# in oder to determine the number of types
num_particles = sum(
pc.count_of(
-                CUDSItem.PARTICLE) for pc in self._pc_cache.itervalues())
+                CUBA.PARTICLE) for pc in self._pc_cache.itervalues())
types = set(pc.data[CUBA.MATERIAL_TYPE]
for pc in self._pc_cache.itervalues())

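
Taken together, the hunks in this file are a one-to-one move from the per-type 0.4.x cache calls to the uniform 0.5.0 container API. A compact sketch of that mapping against a throwaway container, using only calls that appear in this diff (the wrapper's actual cache is not touched here; the Particle constructor arguments are assumed from simphony-common):

from simphony.core.cuba import CUBA
from simphony.cuds.particles import Particle, Particles

pc = Particles(name="cache-demo")
p = Particle(coordinates=(0.0, 0.0, 0.0))

uids = list(pc.add([p]))                           # was add_particles([p])
uid = uids[0]
pc.update([pc.get(uid)])                           # was update_particles(...)
print(pc.has(uid))                                 # was has_particle(uid)
print(pc.get(uid).coordinates)                     # was get_particle(uid)
print(pc.count_of(CUBA.PARTICLE))                  # was count_of(CUDSItem.PARTICLE)
for q in pc.iter(uids, item_type=CUBA.PARTICLE):   # was iter_particles(uids)
    print(q.uid)
pc.remove([uid])                                   # was remove_particles([uid])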
19 changes: 10 additions & 9 deletions simliggghts/io/tests/test_file_utility.py
@@ -6,7 +6,6 @@
from numpy.testing import assert_almost_equal

from simphony.core.cuba import CUBA
-from simphony.core.cuds_item import CUDSItem
from simphony.core.keywords import KEYWORDS

from simliggghts.io.file_utility import (read_data_file,
@@ -39,8 +38,8 @@ def test_read_sphere_style_data_file(self):

particles1 = particles_list[0]
particles2 = particles_list[1]
-        self.assertEqual(2, particles1.count_of(CUDSItem.PARTICLE))
-        self.assertEqual(1, particles2.count_of(CUDSItem.PARTICLE))
+        self.assertEqual(2, particles1.count_of(CUBA.PARTICLE))
+        self.assertEqual(1, particles2.count_of(CUBA.PARTICLE))
self.assertEqual(str(particles1.data[CUBA.MATERIAL_TYPE]),
particles1.name)
assert_almost_equal(
@@ -53,7 +52,7 @@ def test_read_sphere_style_data_file(self):
particles1.data_extension[CUBAExtension.BOX_VECTORS],
box)

-        for p in particles1.iter_particles():
+        for p in particles1.iter(item_type=CUBA.PARTICLE):
assert_almost_equal(p.data[CUBA.ANGULAR_VELOCITY], [0.0, 0.0, 1.0])
assert_almost_equal(p.data[CUBA.VELOCITY], [5.0, 0.0, 0.0])
assert_almost_equal(p.data[CUBA.RADIUS], 0.5/2)
@@ -107,8 +106,8 @@ def _compare_particles_averages(particles,
"""
self = testcase

-    len_particles = particles.count_of(CUDSItem.PARTICLE)
-    len_reference = reference.count_of(CUDSItem.PARTICLE)
+    len_particles = particles.count_of(CUBA.PARTICLE)
+    len_reference = reference.count_of(CUBA.PARTICLE)
self.assertEqual(len_particles, len_reference)
for key in attributes_keys:
average_particles = _get_average_value(particles, key)
@@ -117,14 +116,16 @@ def _compare_particles_averages(particles,


def _get_average_value(particles, key):
-    length = particles.count_of(CUDSItem.PARTICLE)
+    length = particles.count_of(CUBA.PARTICLE)

keyword = KEYWORDS[CUBA(key).name]
if keyword.shape == [1]:
-        return sum(p.data[key] for p in particles.iter_particles())/length
+        return sum(p.data[key] for p in particles.iter(
+            item_type=CUBA.PARTICLE))/length
else:
return tuple(map(lambda y: sum(y) / float(len(y)), zip(
-            *[p.data[key] for p in particles.iter_particles()])))
+            *[p.data[key] for p in particles.iter(
+                item_type=CUBA.PARTICLE)])))


_explicit_sphere_style_file_contents = """LIGGGHTS data file via write_data, version 28 Jun 2014, timestep = 25000
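
The averaging helpers above follow the same pattern: count with CUBA.PARTICLE, iterate with item_type. A stripped-down version of the scalar branch of _get_average_value, assuming simphony 0.5.x (the radius values are made up, and passing data to the Particle constructor is an assumption about simphony-common, not something shown in this diff):

from simphony.core.cuba import CUBA
from simphony.core.data_container import DataContainer
from simphony.cuds.particles import Particle, Particles

pc = Particles(name="avg-demo")
pc.add([Particle(coordinates=(0.0, 0.0, 0.0),
                 data=DataContainer({CUBA.RADIUS: r}))
        for r in (0.2, 0.3, 0.4)])

length = pc.count_of(CUBA.PARTICLE)
mean_radius = sum(p.data[CUBA.RADIUS]
                  for p in pc.iter(item_type=CUBA.PARTICLE)) / length
print(mean_radius)  # 0.3, give or take floating-point error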
42 changes: 22 additions & 20 deletions simliggghts/liggghts_particles.py
@@ -1,5 +1,5 @@
from simphony.core.cuba import CUBA
from simphony.cuds.abc_particles import ABCParticles
-from simphony.core.cuds_item import CUDSItem


class LiggghtsParticles(ABCParticles):
@@ -46,7 +46,7 @@ def data_extension(self, value):

# Particle methods ######################################################

-    def add_particles(self, iterable):
+    def _add_particles(self, iterable):
"""Adds a set of particles from the provided iterable
to the container.
@@ -75,32 +75,32 @@ def add_particles(self, iterable):
"""
return self._manager.add_particles(iterable, self._uname)

-    def update_particles(self, iterable):
+    def _update_particles(self, iterable):
"""Update particles
"""
self._manager.update_particles(iterable, self._uname)

-    def get_particle(self, uid):
+    def _get_particle(self, uid):
"""Get particle
"""
return self._manager.get_particle(uid, self._uname)

-    def remove_particles(self, uids):
+    def _remove_particles(self, uids):
"""Remove particles
"""
for uid in uids:
self._manager.remove_particle(uid, self._uname)

-    def has_particle(self, uid):
+    def _has_particle(self, uid):
"""Has particle
"""
return self._manager.has_particle(uid, self._uname)

-    def iter_particles(self, uids=None):
+    def _iter_particles(self, uids=None):
"""Get iterator over particles
"""
@@ -109,37 +109,39 @@ def iter_particles(self, uids=None):

# Bond methods #######################################################

-    def add_bonds(self, bonds):
+    def _add_bonds(self, bonds):
"""Add bonds
"""
raise NotImplementedError

-    def update_bonds(self, bonds):
+    def _update_bonds(self, bonds):
"""Update particle
"""
raise NotImplementedError

-    def get_bond(self, uid):
+    def _get_bond(self, uid):
"""Get bond
"""
-        raise NotImplementedError
+        raise KeyError("get bond not implemented. "
+                       "uid {} not found".format(uid))

-    def remove_bonds(self, uid):
+    def _remove_bonds(self, uid):
"""Remove bond
"""
-        raise NotImplementedError
+        raise KeyError("remove bond not implemented. "
+                       "uid {} not found".format(uid))

-    def has_bond(self, uid):
+    def _has_bond(self, uid):
"""Has bond
"""
-        raise NotImplementedError
+        return False

-    def iter_bonds(self, uids=None):
+    def _iter_bonds(self, uids=None):
"""Get iterator over bonds
"""
@@ -152,8 +154,8 @@ def count_of(self, item_type):
Parameters
----------
-        item_type : CUDSItem
-            The CUDSItem enum of the type of the items to return the count of.
+        item_type : CUBA enum
+            The CUBA enum of the type of the items to return the count of.
Returns
-------
@@ -167,9 +169,9 @@ def count_of(self, item_type):
container.
"""
-        if item_type == CUDSItem.PARTICLE:
+        if item_type == CUBA.PARTICLE:
return self._manager.number_of_particles(self._uname)
-        elif item_type == CUDSItem.BOND:
+        elif item_type == CUBA.BOND:
return 0
else:
error_str = "Trying to obtain count a of non-supported item: {}"
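
The renames in this file (add_particles becoming _add_particles, and so on) turn the wrapper's methods into underscore-prefixed hooks, which suggests the 0.5.0 ABCParticles base class now owns the public add()/get()/iter() interface and delegates to these hooks. That reading is inferred from the diff, not stated in it; below is a generic template-method sketch with made-up names, not simphony's actual base class:

class ContainerBase(object):
    """Stand-in for an abstract base that owns the public API."""

    def add(self, iterable):
        # public entry point; shared bookkeeping would live here
        return self._add_particles(iterable)

    def _add_particles(self, iterable):
        # hook for concrete containers (cf. LiggghtsParticles above)
        raise NotImplementedError


class ListContainer(ContainerBase):
    def __init__(self):
        self._items = []

    def _add_particles(self, iterable):
        added = list(iterable)
        self._items.extend(added)
        return added


print(ListContainer().add(["p1", "p2"]))  # ['p1', 'p2']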
