Skip to content

Commit

Permalink
Merge pull request #573 from swails/ommparams
Browse files Browse the repository at this point in the history
merge master
  • Loading branch information
swails committed Jan 27, 2016
2 parents cdb7995 + 1a737b1 commit 536a7a7
Show file tree
Hide file tree
Showing 31 changed files with 5,859 additions and 3,062 deletions.
19 changes: 3 additions & 16 deletions .travis.yml
Original file line number Diff line number Diff line change
Expand Up @@ -8,32 +8,19 @@ addons:
packages:
- g++
- gromacs
- pypy

matrix:
include:
- { os: linux, env: PYTHON_VERSION=2.7 }
- { os: linux, env: PYTHON_VERSION=3.3 MINIMAL_PACKAGES=yes }
- { os: linux, env: PYTHON_VERSION=3.4 }
- { os: linux, env: PYTHON_VERSION=3.5 MINIMAL_PACKAGES=yes }
- { os: linux, env: PYTHON_VERSION=pypy }
- { os: osx, env: PYTHON_VERSION=3.4 MINIMAL_PACKAGES=yes }
- { os: osx, env: PYTHON_VERSION=3.5 MINIMAL_PACKAGES=yes }

install:
- source devtools/travis-ci/install.sh
# install our package
- python setup.py install

script:
# Skip tests that use a lot of memory so Travis doesn't sporadically kill
# them. Also fix the number of running threads to 1.
- export PARMED_SKIP_BIG_TESTS=1
- export OPENMM_CPU_THREADS=1
- echo "Checking parmed source with pyflakes linter"
- if [ "$PYTHON_VERSION" = "pypy" ]; then export PYENV_ROOT="${HOME}/.pyenv"; fi
- if [ "$PYTHON_VERSION" = "pypy" ]; then export PATH="${PYENV_ROOT}/bin:${PATH}"; fi
- if [ "$PYTHON_VERSION" = "pypy" ]; then eval "$(pyenv init -)"; fi
- devtools/travis-ci/pyflakes_check.sh
- cd test
- echo "Using `which nosetests`::"
- which nosetests
- nosetests -vs --with-timer --timer-ok=5s --timer-warning=12s .
- source devtools/travis-ci/runtest.sh
2 changes: 1 addition & 1 deletion MANIFEST.in
Original file line number Diff line number Diff line change
@@ -1,3 +1,3 @@
graft examples
graft src
graft test
graft parmed
9 changes: 5 additions & 4 deletions devtools/travis-ci/install.sh
Original file line number Diff line number Diff line change
Expand Up @@ -7,9 +7,9 @@ if [ "$PYTHON_VERSION" = "pypy" ]; then
pyenv install pypy-4.0.1
pyenv global pypy-4.0.1

pypy -m pip install nose coverage pyflakes nose-timer
pypy -m pip install nose pyflakes nose-timer
which pyflakes
pypy -m pip install --user git+https://bitbucket.org/pypy/numpy.git
pypy -m pip install --user git+https://bitbucket.org/pypy/numpy.git@pypy-4.0.1
else # Otherwise, CPython... go through conda
if [ "$TRAVIS_OS_NAME" = "osx" ]; then
wget http://repo.continuum.io/miniconda/Miniconda-3.7.0-MacOSX-x86_64.sh -O miniconda.sh;
Expand All @@ -25,12 +25,13 @@ else # Otherwise, CPython... go through conda

if [ -z "$MINIMAL_PACKAGES" ]; then
conda create -y -n myenv python=$PYTHON_VERSION \
numpy scipy pandas nose openmm pyflakes coverage nose-timer
numpy scipy pandas nose openmm pyflakes coverage nose-timer \
python-coveralls
conda update -y -n myenv --all
else
# Do not install the full numpy/scipy stack
conda create -y -n myenv python=$PYTHON_VERSION numpy nose pyflakes \
coverage nose-timer
coverage nose-timer python-coveralls
fi

source activate myenv
Expand Down
36 changes: 36 additions & 0 deletions devtools/travis-ci/runtest.sh
Original file line number Diff line number Diff line change
@@ -0,0 +1,36 @@
#!/bin/sh
# Travis CI test driver for ParmEd: run the pyflakes lint check, execute the
# test suite (under coverage where available), then combine/report coverage
# and push results to coveralls.  Expects to be sourced/run from the repo root.
set -e

# Combine parallel-mode coverage data files and print a line-by-line report.
do_coverage() {
    echo "Combining coverage data"
    coverage combine
    echo "Reporting..."
    coverage report -m
}

echo "Checking parmed source with pyflakes linter"
if [ "$PYTHON_VERSION" = "pypy" ]; then
    # pypy was installed via pyenv; activate it in this shell so that
    # 'pyflakes'/'nosetests' resolve to the pypy versions.
    export PYENV_ROOT="${HOME}/.pyenv"
    export PATH="${PYENV_ROOT}/bin:${PATH}"
    eval "$(pyenv init -)"
fi
sh devtools/travis-ci/pyflakes_check.sh
cd test
echo "Using nosetests...:"
./run_scripts.sh
if [ "$PYTHON_VERSION" = "pypy" ]; then
    # Disable coverage with pypy, since it multiplies the time taken by 6 or
    # something ridiculous like that
    nosetests -vs --with-timer --timer-ok=5s --timer-warning=12s \
        --timer-filter=warning,error .
else
    # Run nose under coverage, since that allows getting the full flexibility
    # of the coverage package without sacrificing nose functionality
    coverage run --source=parmed --parallel-mode -m \
        nose -vs --with-timer --timer-ok=5s --timer-warning=12s \
        --timer-filter=warning,error .
fi
# Only report coverage / coveralls when the tools are installed (they are
# skipped for the pypy and minimal-package builds).  'command -v' replaces
# the fragile unquoted  test -z `which ...`  idiom, which would mis-parse a
# path containing whitespace and depends on deprecated 'which'.
if command -v coverage >/dev/null 2>&1; then
    do_coverage
fi
echo "Running coveralls"
if command -v coveralls >/dev/null 2>&1; then
    coveralls
fi
echo "Done!"
26 changes: 13 additions & 13 deletions doc/devdoc.rst
Original file line number Diff line number Diff line change
Expand Up @@ -23,9 +23,9 @@ is clearly not being run, or not being run correctly (which means there is no
way that your test could prevent a breakage in the future).

Once you have a failing test case, write code until the test case passes. Then
add more test until the test case fails again. Then update the test case until
it fails again. Rinse-and-repeat until the functionality you are adding is
complete.
add more tests until the test case fails again. Then update the code until the
test passes again. Rinse-and-repeat until the functionality you are adding is
complete and fully-tested.

Coding Style
------------
Expand All @@ -40,7 +40,7 @@ the following I consider very important (and will block any PRs):
- Please make sure comments and docstrings are written in English
- Use only absolute imports or *explicit* relative imports
- Always use ``print`` as a function (via the ``print_function`` futures
import)
import) -- see below for why this is a hard requirement.

Things I would like to see, but are not as important:

Expand All @@ -62,8 +62,8 @@ package, and all components should be imported from there.
In particular, the ``range`` and ``zip`` builtins should be imported from
``parmed.utils.six.moves`` rather than relying on the standard versions. This is
because in Python 3, ``range`` and ``zip`` return efficient iterators, while in
Python 2 they return (sometimes *very* inefficient) lists. The Python
2-equivalent versions of the ``range`` and ``zip`` iterators are ``xrange`` and
Python 2 they return potentially inefficient lists. The Python 2-equivalent
versions of the ``range`` and ``zip`` iterators are ``xrange`` and
``itertools.izip``, respectively, which are the *actual* functions defined
within ``parmed.utils.six.moves`` for Python 2.

Expand Down Expand Up @@ -108,14 +108,14 @@ Obviously in this case, ``test_parmed_structure.py`` is replaced with whichever
test module you are working on. You can select *specific* tests using the ``-m``
flag specifying a regex that matches the test case method. For example::

nosetests -vs test/test_parmed_structure.py -m AddAtom
nosetests -vs test/test_parmed_structure.py -m add_atom

will test both the ``testAddAtom`` and ``testAddAtomToResidue`` methods. This is
an easy way to run tests quickly while working on new methods *without* having
to run ``python setup.py install`` after every change. Note that when you run
tests from the root ParmEd directory, however, the imported ParmEd repository
will not have any Python extensions installed (meaning that the tests relying on
them -- like the test for the Amber optimized reader -- will fail).
will test both the ``test_add_atom`` and ``test_add_atom_to_residue`` methods.
This is an easy way to run tests quickly while working on new methods *without*
having to run ``python setup.py install`` after every change. Note that when you
run tests from the root ParmEd directory, however, the imported ParmEd
repository will not have any Python extensions installed (meaning that the tests
relying on them -- like the test for the Amber optimized reader -- will fail).

ParmEd utilizes the Travis continuous integration server to perform automatic
tests of all pull requests. Tests generally must pass these tests before being
Expand Down
4 changes: 2 additions & 2 deletions doc/index.rst
Original file line number Diff line number Diff line change
Expand Up @@ -151,9 +151,9 @@ A simple example demonstrating the file conversion capabilities is to convert a
PDBx/mmCIF file into the more commonly supported PDB format::

>>> import parmed as pmd
>>> pmd.write_PDB(pmd.download_CIF('4lzt'), '4lzt.pdb')
>>> pmd.download_CIF('4lzt').save('4lzt.pdb')
>>> # Now read in the PDB file we just created
... pmd.read_PDB('4lzt.pdb')
... pmd.load_file('4lzt.pdb')
<Structure 1164 atoms; 274 residues; 0 bonds; PBC; NOT parametrized>

Program and API Reference
Expand Down
7 changes: 4 additions & 3 deletions parmed/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -6,7 +6,7 @@
# Version format should be "major.minor.patch". For beta releases, attach
# "-beta#" to the end. The beta number will be turned into another number in the
# version tuple
__version__ = '2.1.6'
__version__ = '2.1.11'
__author__ = 'Jason Swails'

__all__ = ['exceptions', 'periodic_table', 'residue', 'unit', 'utils',
Expand All @@ -23,11 +23,12 @@
from parmed import formats
from parmed.vec3 import Vec3
from parmed.parameters import ParameterSet
from parmed.utils.decorators import deprecated as _deprecated
load_file = formats.load_file
read_PDB = formats.PDBFile.parse
read_CIF = formats.CIFFile.parse
write_PDB = formats.PDBFile.write
write_CIF = formats.CIFFile.write
write_PDB = _deprecated(formats.PDBFile.write)
write_CIF = _deprecated(formats.CIFFile.write)
load_rosetta = rosetta.RosettaPose.load

download_PDB = formats.PDBFile.download
Expand Down
26 changes: 15 additions & 11 deletions parmed/amber/_amberparm.py
Original file line number Diff line number Diff line change
Expand Up @@ -842,12 +842,12 @@ def has_NBFIX(self):
If True, off-diagonal elements in the combined Lennard-Jones matrix
exist. If False, they do not.
"""
assert self.combining_rule in ('lorentz', 'geometric'), \
"Unrecognized combining rule"
if self.combining_rule == 'lorentz':
comb_sig = lambda sig1, sig2: 0.5 * (sig1 + sig2)
elif self.combining_rule == 'geometric':
comb_sig = lambda sig1, sig2: sqrt(sig1 * sig2)
# else:
assert self.combining_rule in ('lorentz', 'geometric'), "Unrecognized combining rule"
fac = 2**(-1/6) * 2
LJ_sigma = [x*fac for x in self.LJ_radius]
pd = self.parm_data
Expand Down Expand Up @@ -978,6 +978,8 @@ def omm_nonbonded_force(self, nonbondedMethod=None,
hasnbfix = self.has_NBFIX()
has1264 = 'LENNARD_JONES_CCOEF' in self.flag_list
if not hasnbfix and not has1264 and not has1012:
if self.chamber:
self._modify_nonb_exceptions(nonbfrc, None)
return nonbfrc

# If we have NBFIX, omm_nonbonded_force returned a tuple
Expand Down Expand Up @@ -1741,8 +1743,9 @@ def _modify_nonb_exceptions(self, nonbfrc, customforce):
exclusions. The exceptions on the nonbonded force might need to be
adjusted if off-diagonal modifications on the L-J matrix are present
"""
# To get into this routine, we already needed to know that nbfix is
# present
# To get into this routine, either NBFIX is present OR this is a chamber
# prmtop and we need to pull the 1-4 L-J parameters from the
# LENNARD_JONES_14_A/BCOEF arrays
length_conv = u.angstroms.conversion_factor_to(u.nanometers)
ene_conv = u.kilocalories.conversion_factor_to(u.kilojoules)
atoms = self.atoms
Expand All @@ -1762,7 +1765,8 @@ def _modify_nonb_exceptions(self, nonbfrc, customforce):
ee.value_in_unit(u.kilocalories_per_mole) == 0):
# Copy this exclusion as-is... no need to modify the nonbfrc
# exception parameters
customforce.addExclusion(i, j)
if customforce is not None:
customforce.addExclusion(i, j)
continue
# Figure out what the 1-4 scaling parameters were for this pair...
unscaled_ee = sqrt(self.atoms[i].epsilon_14 *
Expand All @@ -1788,7 +1792,8 @@ def _modify_nonb_exceptions(self, nonbfrc, customforce):
epsilon = b / (2 * rmin**6) * ene_conv * one_scnb
sigma = rmin * sigma_scale
nonbfrc.setExceptionParameters(ii, i, j, qq, sigma, epsilon)
customforce.addExclusion(i, j)
if customforce is not None:
customforce.addExclusion(i, j)

#===================================================

Expand Down Expand Up @@ -1862,12 +1867,11 @@ def _add_missing_13_14(self, ignore_inconsistent_vdw=False):
scee = 1e10
else:
scee = 1 / pair.type.chgscale
if (abs(rref - pair.type.rmin) > SMALL and
if ignore_inconsistent_vdw:
scnb = 1.0
elif (abs(rref - pair.type.rmin) > SMALL and
pair.type.epsilon != 0):
if ignore_inconsistent_vdw:
scnb = 1.0
else:
raise TypeError('Cannot translate exceptions')
raise TypeError('Cannot translate exceptions')
if (abs(scnb - dihedral.type.scnb) < SMALL and
abs(scee - dihedral.type.scee) < SMALL):
continue
Expand Down
25 changes: 7 additions & 18 deletions parmed/amber/_chamberparm.py
Original file line number Diff line number Diff line change
Expand Up @@ -193,10 +193,10 @@ def from_structure(cls, struct, copy=False):
inst.LJ_14_radius = [0 for i in range(ntyp)]
inst.LJ_14_depth = [0 for i in range(ntyp)]
for atom in inst.atoms:
inst.LJ_radius[atom.nb_idx-1] = atom.atom_type.rmin
inst.LJ_depth[atom.nb_idx-1] = atom.atom_type.epsilon
inst.LJ_14_radius[atom.nb_idx-1] = atom.atom_type.rmin_14
inst.LJ_14_depth[atom.nb_idx-1] = atom.atom_type.epsilon_14
inst.LJ_radius[atom.nb_idx-1] = atom.rmin
inst.LJ_depth[atom.nb_idx-1] = atom.epsilon
inst.LJ_14_radius[atom.nb_idx-1] = atom.rmin_14
inst.LJ_14_depth[atom.nb_idx-1] = atom.epsilon_14
inst._add_standard_flags()
inst.pointers['NATOM'] = len(inst.atoms)
inst.parm_data['POINTERS'][NATOM] = len(inst.atoms)
Expand Down Expand Up @@ -684,33 +684,23 @@ def _set_nonbonded_tables(self, nbfixes=None):
for i in range(len(data['LENNARD_JONES_14_ACOEF'])):
data['LENNARD_JONES_14_ACOEF'][i] = None
data['LENNARD_JONES_14_BCOEF'][i] = None
atom_types_assigned_unique_idx = set()
ii = 0
while True:
needed_split = False
for pair in self.adjusts:
a1, a2 = pair.atom1, pair.atom2
i, j = sorted([a1.nb_idx - 1, a2.nb_idx - 1])
idx = data['NONBONDED_PARM_INDEX'][ntypes*i+j] - 1
eps = sqrt(a1.epsilon_14 * a2.epsilon_14)
rmin = a1.rmin_14 + a2.rmin_14
eps = pair.type.epsilon
rmin = pair.type.rmin
rmin6 = rmin * rmin * rmin * rmin * rmin * rmin
acoef = eps * rmin6*rmin6
bcoef = 2 * eps * rmin6
if data['LENNARD_JONES_14_ACOEF'][idx] is not None:
if abs(data['LENNARD_JONES_14_ACOEF'][idx] - acoef) > SMALL:
# Need to split out another type
needed_split = True
if a1.type in atom_types_assigned_unique_idx:
if a2.type in atom_types_assigned_unique_idx:
# Ugh. Split out this atom by itself
mask = '@%d' % (a1.idx + 1)
else:
mask = '@%%%s' % a2.type
atom_types_assigned_unique_idx.add(a2.type)
else:
atom_types_assigned_unique_idx.add(a1.type)
mask = '@%%%s' % a1.type
mask = '@%d' % (a1.idx + 1)
addLJType(self, mask, radius_14=0,
epsilon_14=0).execute()
ntypes += 1
Expand All @@ -731,7 +721,6 @@ def _set_nonbonded_tables(self, nbfixes=None):
# The following should never happen
assert ii <= len(self.atoms), 'Could not resolve all exceptions. ' \
'Some unexpected problem with the algorithm'
# TODO delete
# Now go through and change all None's to 0s, as these terms won't be
# used for any exceptions, anyway
for i, item in enumerate(data['LENNARD_JONES_14_ACOEF']):
Expand Down
15 changes: 13 additions & 2 deletions parmed/gromacs/_cpp.py
Original file line number Diff line number Diff line change
Expand Up @@ -328,6 +328,13 @@ def _pp_undef(self, args):
elif len(words) == 0:
raise PreProcessorError('Nothing defined in #undef')

# Context manager protocol
def __exit__(self, type, value, traceback):
self.close()

def __enter__(self):
return self

_ppcmdmap = {'if' : _pp_if, 'elif' : _pp_elif, 'ifdef' : _pp_ifdef,
'else' : _pp_else, 'define' : _pp_define, 'undef' : _pp_undef,
'include' : _pp_include, 'endif' : _pp_endif,
Expand All @@ -340,7 +347,7 @@ def _pp_undef(self, args):
parser = argparse.ArgumentParser()
parser.add_argument('-i', '--input-file', dest='input', metavar='FILE',
required=True, help='''Input file to pre-process. Either a file
name or, if '--' is given, from standard input.''')
name or, if '-' is given, from standard input.''')
parser.add_argument('-o', '--output-file', dest='output', metavar='FILE',
default=None, help='''Output file with preprocessed results.
Default is standard output''')
Expand All @@ -361,15 +368,19 @@ def _pp_undef(self, args):
val = '1'
defines[define] = val

if opt.input == '--':
if opt.input == '-':
f = sys.stdin
else:
f = opt.input
pp = CPreProcessor(f, defines=defines, includes=opt.includes)
if opt.output is None:
output = sys.stdout
own_handle = False
else:
output = genopen(opt.output, 'w')
own_handle = True

for line in pp:
output.write(line)
if own_handle:
output.close()
Loading

0 comments on commit 536a7a7

Please sign in to comment.