
Merge a01e688 into 31c9b6b
rbharath committed Oct 21, 2020
2 parents 31c9b6b + a01e688 commit 76ddcd2
Showing 3 changed files with 318 additions and 181 deletions.
54 changes: 48 additions & 6 deletions deepchem/models/layers.py
@@ -2686,7 +2686,14 @@ def get_config(self):
     return config

   def build(self, input_shape):
-    init = initializers.get(self.init)
+
+    def init(input_shape):
+      return self.add_weight(
+          name='kernel',
+          shape=(input_shape[0], input_shape[1]),
+          initializer=self.init,
+          trainable=True)
+
     self.embedding_list = init([self.periodic_table_length, self.n_embedding])
     self.built = True
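
Note on the recurring pattern in this commit: the old code looked up a Keras initializer with initializers.get(self.init) and called it directly, which returns a raw tensor that the layer does not track. Routing creation through self.add_weight, as the new local init helper does, registers each variable with the layer, so it appears in layer.trainable_weights and is trained and checkpointed with the model. A minimal standalone sketch of the pattern (the class name and sizes are illustrative, not from this commit):

import tensorflow as tf

class EmbeddingSketch(tf.keras.layers.Layer):
  """Illustrative layer using the add_weight pattern from this commit."""

  def __init__(self, n_embedding=30, periodic_table_length=83, **kwargs):
    super(EmbeddingSketch, self).__init__(**kwargs)
    self.n_embedding = n_embedding
    self.periodic_table_length = periodic_table_length
    self.init = 'glorot_uniform'  # initializer identifier, as in the diff

  def build(self, input_shape):

    def init(input_shape):
      # add_weight creates a tf.Variable owned by the layer, so Keras
      # tracks it for training, saving, and trainable_weights.
      return self.add_weight(
          name='kernel',
          shape=(input_shape[0], input_shape[1]),
          initializer=self.init,
          trainable=True)

    self.embedding_list = init([self.periodic_table_length, self.n_embedding])
    self.built = True

layer = EmbeddingSketch()
layer.build(None)
print(len(layer.trainable_weights))  # 1: the embedding variable is tracked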

@@ -2739,7 +2746,14 @@ def get_config(self):
     return config

   def build(self, input_shape):
-    init = initializers.get(self.init)
+
+    def init(input_shape):
+      return self.add_weight(
+          name='kernel',
+          shape=(input_shape[0], input_shape[1]),
+          initializer=self.init,
+          trainable=True)
+
     self.W_cf = init([self.n_embedding, self.n_hidden])
     self.W_df = init([self.n_distance, self.n_hidden])
     self.W_fc = init([self.n_hidden, self.n_embedding])

@@ -2824,7 +2838,14 @@ def get_config(self):
   def build(self, input_shape):
     self.W_list = []
     self.b_list = []
-    init = initializers.get(self.init)
+
+    def init(input_shape):
+      return self.add_weight(
+          name='kernel',
+          shape=(input_shape[0], input_shape[1]),
+          initializer=self.init,
+          trainable=True)
+
     prev_layer_size = self.n_embedding
     for i, layer_size in enumerate(self.layer_sizes):
       self.W_list.append(init([prev_layer_size, layer_size]))

@@ -3230,9 +3251,16 @@ def get_config(self):
     return config

   def build(self, input_shape):
+
+    def init(input_shape):
+      return self.add_weight(
+          name='kernel',
+          shape=(input_shape[0], input_shape[1]),
+          initializer=self.init,
+          trainable=True)
+
     n_pair_features = self.n_pair_features
     n_hidden = self.n_hidden
-    init = initializers.get(self.init)
     self.W = init([n_pair_features, n_hidden * n_hidden])
     self.b = backend.zeros(shape=(n_hidden * n_hidden,))
     self.built = True

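For context on the shapes in this hunk: a kernel of shape [n_pair_features, n_hidden * n_hidden] maps each pair-feature vector to a flattened n_hidden x n_hidden matrix, the edge-network construction used in message passing networks. A hedged sketch of how such a weight is typically applied (the call-side logic and the sizes are assumptions, not shown in the diff):

import tensorflow as tf

# Shapes follow the hunk above; the numbers and the matmul/reshape are a
# sketch of the usual edge-network message computation, not commit code.
n_pair_features, n_hidden, n_pairs = 14, 75, 128
W = tf.random.normal([n_pair_features, n_hidden * n_hidden])
pair_features = tf.random.normal([n_pairs, n_pair_features])

# Each pair's features parameterize an (n_hidden, n_hidden) matrix that
# transforms the neighboring atom's hidden state into a message.
A = tf.reshape(tf.matmul(pair_features, W), [n_pairs, n_hidden, n_hidden])
neighbor_states = tf.random.normal([n_pairs, n_hidden])
messages = tf.linalg.matvec(A, neighbor_states)  # shape (n_pairs, n_hidden)
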
@@ -3262,7 +3290,14 @@ def get_config(self):

   def build(self, input_shape):
     n_hidden = self.n_hidden
-    init = initializers.get(self.init)
+
+    def init(input_shape):
+      return self.add_weight(
+          name='kernel',
+          shape=(input_shape[0], input_shape[1]),
+          initializer=self.init,
+          trainable=True)
+
     self.Wz = init([n_hidden, n_hidden])
     self.Wr = init([n_hidden, n_hidden])
     self.Wh = init([n_hidden, n_hidden])

@@ -3317,7 +3352,14 @@ def get_config(self):
     return config

   def build(self, input_shape):
-    init = initializers.get(self.init)
+
+    def init(input_shape):
+      return self.add_weight(
+          name='kernel',
+          shape=(input_shape[0], input_shape[1]),
+          initializer=self.init,
+          trainable=True)
+
     self.U = init((2 * self.n_hidden, 4 * self.n_hidden))
     self.b = tf.Variable(
         np.concatenate((np.zeros(self.n_hidden), np.ones(self.n_hidden),
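
The bias line in this last hunk is cut off by the diff view. The visible zeros/ones blocks, together with U having 4 * n_hidden columns, match the common LSTM-style practice of one n_hidden-wide bias block per gate, with a forget-style gate's block initialized to 1 so the cell retains memory early in training. A minimal sketch of that pattern (the gate order and the trailing blocks are assumptions, since the diff truncates the line):

import numpy as np
import tensorflow as tf

n_hidden = 3
# One n_hidden-wide bias block per gate; the ones block keeps a
# forget-style gate open at the start of training. Gate order and the
# number of trailing zero blocks are assumed, not taken from the diff.
b = tf.Variable(
    np.concatenate((np.zeros(n_hidden), np.ones(n_hidden),
                    np.zeros(2 * n_hidden))),
    dtype=tf.float32)
print(b.numpy())  # [0. 0. 0. 1. 1. 1. 0. 0. 0. 0. 0. 0.]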
