Skip to content

Commit

Permalink
Code quality improvements (#137)
Browse files Browse the repository at this point in the history
  • Loading branch information
zasdfgbnm committed Oct 30, 2018
1 parent 45252be commit 63cae6a
Show file tree
Hide file tree
Showing 6 changed files with 28 additions and 35 deletions.
1 change: 0 additions & 1 deletion .dockerignore

This file was deleted.

6 changes: 0 additions & 6 deletions Dockerfile

This file was deleted.

8 changes: 4 additions & 4 deletions tests/test_data.py
Original file line number Diff line number Diff line change
Expand Up @@ -58,8 +58,8 @@ def testSplitBatch(self):

def testTensorShape(self):
for i in self.ds:
input, output = i
species, coordinates = torchani.utils.pad_coordinates(input)
input_, output = i
species, coordinates = torchani.utils.pad_coordinates(input_)
energies = output['energies']
self.assertEqual(len(species.shape), 2)
self.assertLessEqual(species.shape[0], batch_size)
Expand All @@ -72,8 +72,8 @@ def testTensorShape(self):

def testNoUnnecessaryPadding(self):
    """Check that padding in each chunk is minimal.

    Padding atoms are marked with a negative species index, so if the
    last column of a chunk contains no entry with species >= 0 the chunk
    was padded wider than necessary.
    """
    # NOTE(review): the scraped diff showed both the pre- and post-commit
    # loop bodies; only the post-commit version (input_ instead of the
    # builtin-shadowing name input) is kept here.
    for i in self.ds:
        for input_ in i[0]:
            species, _ = input_
            non_padding = (species >= 0)[:, -1].nonzero()
            self.assertGreater(non_padding.numel(), 0)

Expand Down
14 changes: 7 additions & 7 deletions torchani/ignite.py
Original file line number Diff line number Diff line change
Expand Up @@ -50,8 +50,8 @@ def __init__(self, key, loss):
self.key = key
self.loss = loss

def forward(self, input, other):
return self.loss(input[self.key], other[self.key])
def forward(self, input_, other):
    """Look up ``self.key`` in both dicts and apply the wrapped loss."""
    predicted = input_[self.key]
    expected = other[self.key]
    return self.loss(predicted, expected)


class PerAtomDictLoss(DictLoss):
Expand All @@ -60,9 +60,9 @@ class PerAtomDictLoss(DictLoss):
by the caller. Currently the only reduce operation supported is averaging.
"""

def forward(self, input, other):
loss = self.loss(input[self.key], other[self.key])
num_atoms = (input['species'] >= 0).sum(dim=1)
def forward(self, input_, other):
    """Per-molecule loss normalized by atom count, averaged over the batch.

    Padding atoms carry a negative species index, so counting entries with
    species >= 0 along dim 1 gives each molecule's true atom count.
    """
    per_molecule = self.loss(input_[self.key], other[self.key])
    atom_counts = (input_['species'] >= 0).sum(dim=1)
    per_atom = per_molecule / atom_counts.to(per_molecule.dtype).to(per_molecule.device)
    return per_atom.sum() / per_atom.numel()
Expand Down Expand Up @@ -102,8 +102,8 @@ def __init__(self, origin, transform):
self.origin = origin
self.transform = transform

def forward(self, input, other):
return self.transform(self.origin(input, other))
def forward(self, input_, other):
    """Evaluate the wrapped loss, then post-process its value with transform."""
    raw = self.origin(input_, other)
    return self.transform(raw)


def RMSEMetric(key):
Expand Down
30 changes: 15 additions & 15 deletions torchani/neurochem/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -101,11 +101,11 @@ def load_atomic_network(filename):
"""Returns an instance of :class:`torch.nn.Sequential` with hyperparameters
and parameters loaded NeuroChem's .nnf, .wparam and .bparam files."""

def decompress_nnf(buffer):
while buffer[0] != b'='[0]:
buffer = buffer[1:]
buffer = buffer[2:]
return bz2.decompress(buffer)[:-1].decode('ascii').strip()
def decompress_nnf(buffer_):
    """Strip the plain-text header from a .nnf blob and return the
    bz2-decompressed payload as stripped ASCII text.

    The header ends at the first b'=' byte; that byte and the byte after
    it (the separating newline) are discarded before decompression.
    Raises IndexError if no b'=' byte is present, matching the original
    byte-by-byte scan.
    """
    # Scan by index instead of repeatedly re-slicing: the previous
    # `buffer_ = buffer_[1:]` loop copied the whole tail on every
    # iteration, making the header skip O(n^2).
    start = 0
    while buffer_[start] != b'='[0]:
        start += 1
    payload = buffer_[start + 2:]
    # The decompressed text carries one trailing byte we drop before decoding.
    return bz2.decompress(payload)[:-1].decode('ascii').strip()

def parse_nnf(nnf_file):
# parse input file
Expand Down Expand Up @@ -200,9 +200,9 @@ def load_param_file(linear, in_size, out_size, wfn, bfn):
networ_dir = os.path.dirname(filename)

with open(filename, 'rb') as f:
buffer = f.read()
buffer = decompress_nnf(buffer)
layer_setups = parse_nnf(buffer)
buffer_ = f.read()
buffer_ = decompress_nnf(buffer_)
layer_setups = parse_nnf(buffer_)

layers = []
for s in layer_setups:
Expand All @@ -225,18 +225,18 @@ def load_param_file(linear, in_size, out_size, wfn, bfn):
return torch.nn.Sequential(*layers)


def load_model(species, dir):
def load_model(species, dir_):
    """Returns an instance of :class:`torchani.ANIModel` loaded from
    NeuroChem's network directory.

    Arguments:
        species (:class:`collections.abc.Sequence`): Sequence of strings for
            chemical symbols of each supported atom type in correct order.
        dir_ (str): String for directory storing network configurations.
    """
    # One atomic network per chemical symbol, read from ANN-<symbol>.nnf.
    networks = [
        load_atomic_network(os.path.join(dir_, 'ANN-{}.nnf'.format(symbol)))
        for symbol in species
    ]
    return ANIModel(networks)

Expand Down Expand Up @@ -439,7 +439,7 @@ def input_size(self, v):
return TreeExec().transform(tree)

def _construct(self, network_setup, params):
dir = os.path.dirname(os.path.abspath(self.filename))
dir_ = os.path.dirname(os.path.abspath(self.filename))

# delete ignored params
def del_if_exists(key):
Expand Down Expand Up @@ -468,14 +468,14 @@ def assert_param(key, value):
assert_param('ntwshr', 0)

# load parameters
self.const_file = os.path.join(dir, params['sflparamsfile'])
self.const_file = os.path.join(dir_, params['sflparamsfile'])
self.consts = Constants(self.const_file)
self.aev_computer = AEVComputer(**self.consts)
del params['sflparamsfile']
self.sae_file = os.path.join(dir, params['atomEnergyFile'])
self.sae_file = os.path.join(dir_, params['atomEnergyFile'])
self.shift_energy = load_sae(self.sae_file)
del params['atomEnergyFile']
network_dir = os.path.join(dir, params['ntwkStoreDir'])
network_dir = os.path.join(dir_, params['ntwkStoreDir'])
if not os.path.exists(network_dir):
os.makedirs(network_dir)
self.model_checkpoint = os.path.join(network_dir, self.checkpoint_name)
Expand Down
4 changes: 2 additions & 2 deletions torchani/nn.py
Original file line number Diff line number Diff line change
Expand Up @@ -43,8 +43,8 @@ def forward(self, species_aev):
dtype=aev.dtype)
for i in present_species:
mask = (species_ == i)
input = aev.index_select(0, mask.nonzero().squeeze())
output.masked_scatter_(mask, self[i](input).squeeze())
input_ = aev.index_select(0, mask.nonzero().squeeze())
output.masked_scatter_(mask, self[i](input_).squeeze())
output = output.view_as(species)
return species, self.reducer(output, dim=1)

Expand Down

0 comments on commit 63cae6a

Please sign in to comment.