Skip to content

Commit

Permalink
Merge pull request #546 from rbharath/layer_variables
Browse files Browse the repository at this point in the history
Layers own variables
  • Loading branch information
rbharath committed May 4, 2017
2 parents f8eda94 + 648f361 commit 6652dd1
Show file tree
Hide file tree
Showing 3 changed files with 246 additions and 131 deletions.
67 changes: 50 additions & 17 deletions deepchem/models/tensorgraph/graph_layers.py
Expand Up @@ -25,25 +25,31 @@ class Combine_AP(Layer):
def __init__(self, **kwargs):
  """Combine_AP holds no state of its own; all kwargs go to the Layer base."""
  super(Combine_AP, self).__init__(**kwargs)

def create_tensor(self, in_layers=None, set_tensors=True, **kwargs):
  """Bundle the atom and pair tensors of two parent layers into one list.

  Note: this span contained both the pre- and post-merge diff lines
  (duplicate signature and duplicate out_tensor assignment); this is the
  resolved post-merge version.

  Parameters
  ----------
  in_layers: list of Layer, optional
    Parent layers [atom_layer, pair_layer]; defaults to self.in_layers.
  set_tensors: bool
    If True, also store the result on self.out_tensor (side effect the
    surrounding TensorGraph machinery relies on when building the graph).

  Returns
  -------
  list
    [A, P] where A and P are the out_tensors of the two parents.
  """
  if in_layers is None:
    in_layers = self.in_layers
  in_layers = convert_to_layers(in_layers)
  A = in_layers[0].out_tensor
  P = in_layers[1].out_tensor
  out_tensor = [A, P]
  if set_tensors:
    self.out_tensor = out_tensor
  return out_tensor


class Separate_AP(Layer):

def __init__(self, **kwargs):
  """Separate_AP holds no state of its own; all kwargs go to the Layer base."""
  super(Separate_AP, self).__init__(**kwargs)

def create_tensor(self, in_layers=None, set_tensors=True, **kwargs):
  """Extract the atom tensor (first element) from a combined [A, P] parent.

  Note: this span contained both the pre- and post-merge diff lines
  (duplicate signature and duplicate out_tensor assignment); this is the
  resolved post-merge version.

  Parameters
  ----------
  in_layers: list of Layer, optional
    Single parent whose out_tensor is a list [A, P] (e.g. a Combine_AP or
    WeaveLayer); defaults to self.in_layers.
  set_tensors: bool
    If True, also store the result on self.out_tensor (side effect the
    surrounding TensorGraph machinery relies on when building the graph).

  Returns
  -------
  tf.Tensor
    The atom tensor A, i.e. element 0 of the parent's out_tensor.
  """
  if in_layers is None:
    in_layers = self.in_layers
  in_layers = convert_to_layers(in_layers)
  out_tensor = in_layers[0].out_tensor[0]
  if set_tensors:
    self.out_tensor = out_tensor
  return out_tensor


class WeaveLayer(Layer):
Expand Down Expand Up @@ -108,6 +114,9 @@ def __init__(self,

def build(self):
""" Construct internal trainable weights.
TODO(rbharath): Need to make this not set instance variables to
follow style in other layers.
"""

self.W_AA = self.init([self.n_atom_input_feat, self.n_hidden_AA])
Expand Down Expand Up @@ -147,7 +156,7 @@ def build(self):
self.trainable_weights.extend(
[self.W_AP, self.b_AP, self.W_PP, self.b_PP, self.W_P, self.b_P])

def create_tensor(self, in_layers=None, **kwargs):
def create_tensor(self, in_layers=None, set_tensors=True, **kwargs):
""" description and explanation refer to deepchem.nn.WeaveLayer
parent layers: [atom_features, pair_features], pair_split, atom_to_pair
"""
Expand Down Expand Up @@ -190,7 +199,11 @@ def create_tensor(self, in_layers=None, **kwargs):
P = self.activation(P)
else:
P = pair_features
self.out_tensor = [A, P]
out_tensor = [A, P]
if set_tensors:
self.variables = self.trainable_weights
self.out_tensor = out_tensor
return out_tensor


class WeaveGather(Layer):
Expand Down Expand Up @@ -241,7 +254,7 @@ def build(self):
else:
self.trainable_weights = None

def create_tensor(self, in_layers=None, **kwargs):
def create_tensor(self, in_layers=None, set_tensors=True, **kwargs):
""" description and explanation refer to deepchem.nn.WeaveGather
parent layers: atom_features, atom_split
"""
Expand All @@ -261,7 +274,11 @@ def create_tensor(self, in_layers=None, **kwargs):
if self.gaussian_expand:
output_molecules = tf.matmul(output_molecules, self.W) + self.b
output_molecules = self.activation(output_molecules)
self.out_tensor = output_molecules
out_tensor = output_molecules
if set_tensors:
self.variables = self.trainable_weights
self.out_tensor = out_tensor
return out_tensor

def gaussian_histogram(self, x):
gaussian_memberships = [(-1.645, 0.283), (-1.080, 0.170), (-0.739, 0.134),
Expand Down Expand Up @@ -312,7 +329,7 @@ def build(self):
[self.periodic_table_length, self.n_embedding])
self.trainable_weights = [self.embedding_list]

def create_tensor(self, in_layers=None, **kwargs):
def create_tensor(self, in_layers=None, set_tensors=True, **kwargs):
"""description and explanation refer to deepchem.nn.DTNNEmbedding
parent layers: atom_number
"""
Expand Down Expand Up @@ -375,7 +392,7 @@ def build(self):
self.W_cf, self.W_df, self.W_fc, self.b_cf, self.b_df
]

def create_tensor(self, in_layers=None, **kwargs):
def create_tensor(self, in_layers=None, set_tensors=True, **kwargs):
"""description and explanation refer to deepchem.nn.DTNNStep
parent layers: atom_features, distance, distance_membership_i, distance_membership_j
"""
Expand Down Expand Up @@ -406,7 +423,11 @@ def create_tensor(self, in_layers=None, **kwargs):
# for atom i, sum the influence from all other atom j in the molecule
outputs = tf.segment_sum(outputs,
distance_membership_i) - output_ii + atom_features
self.out_tensor = outputs
out_tensor = outputs
if set_tensors:
self.variables = self.trainable_weights
self.out_tensor = out_tensor
return out_tensor


class DTNNGather(Layer):
Expand Down Expand Up @@ -461,7 +482,7 @@ def build(self):

self.trainable_weights = self.W_list + self.b_list

def create_tensor(self, in_layers=None, **kwargs):
def create_tensor(self, in_layers=None, set_tensors=True, **kwargs):
"""description and explanation refer to deepchem.nn.DTNNGather
parent layers: atom_features, atom_membership
"""
Expand All @@ -476,7 +497,11 @@ def create_tensor(self, in_layers=None, **kwargs):
output = tf.matmul(output, W) + self.b_list[i]
output = self.activation(output)
output = tf.segment_sum(output, atom_membership)
self.out_tensor = output
out_tensor = output
if set_tensors:
self.variables = self.trainable_weights
self.out_tensor = out_tensor
return out_tensor


class DAGLayer(Layer):
Expand Down Expand Up @@ -548,7 +573,7 @@ def build(self):

self.trainable_weights = self.W_list + self.b_list

def create_tensor(self, in_layers=None, **kwargs):
def create_tensor(self, in_layers=None, set_tensors=True, **kwargs):
"""description and explanation refer to deepchem.nn.DAGLayer
parent layers: atom_features, parents, calculation_orders, calculation_masks, n_atoms
"""
Expand Down Expand Up @@ -614,7 +639,11 @@ def create_tensor(self, in_layers=None, **kwargs):
graph_features = tf.scatter_nd_update(graph_features, target_index,
batch_outputs)

self.out_tensor = batch_outputs
out_tensor = batch_outputs
if set_tensors:
self.variables = self.trainable_weights
self.out_tensor = out_tensor
return out_tensor

def DAGgraph_step(self, batch_inputs, W_list, b_list):
outputs = batch_inputs
Expand Down Expand Up @@ -686,7 +715,7 @@ def build(self):

self.trainable_weights = self.W_list + self.b_list

def create_tensor(self, in_layers=None, **kwargs):
def create_tensor(self, in_layers=None, set_tensors=True, **kwargs):
"""description and explanation refer to deepchem.nn.DAGGather
parent layers: atom_features, membership
"""
Expand All @@ -704,7 +733,11 @@ def create_tensor(self, in_layers=None, **kwargs):
graph_features = tf.segment_sum(atom_features, membership)
# sum all graph outputs
outputs = self.DAGgraph_step(graph_features, self.W_list, self.b_list)
self.out_tensor = outputs
out_tensor = outputs
if set_tensors:
self.variables = self.trainable_weights
self.out_tensor = out_tensor
return out_tensor

def DAGgraph_step(self, batch_inputs, W_list, b_list):
outputs = batch_inputs
Expand Down

0 comments on commit 6652dd1

Please sign in to comment.