TPU compat display improvement for tensorboard
kpe committed Nov 16, 2019
1 parent 3683f4c commit d561d14
Showing 2 changed files with 4 additions and 4 deletions.
2 changes: 1 addition & 1 deletion bert/attention.py
@@ -116,7 +116,7 @@ def transpose_for_scores(input_tensor, seq_len):
  attention_mask = tf.expand_dims(attention_mask, axis=1)  # [B, 1, F, T]
  # {1, 0} -> {0.0, -inf}
  adder = (1.0 - tf.cast(attention_mask, tf.float32)) * self.params.negative_infinity
- attention_scores += adder  # adding to softmax -> its like removing them entirely
+ attention_scores = tf.add(attention_scores, adder)  # adding to softmax -> its like removing them entirely

  # scores to probabilities
  attention_probs = tf.nn.softmax(attention_scores)  # [B, N, F, T]
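For readers skimming the hunk above, here is a minimal standalone sketch of the masking pattern these lines implement (toy shapes and a made-up negative_infinity value, not the library's configuration): a {1, 0} attention mask becomes a large-negative adder that is summed into the raw scores with tf.add, so the subsequent softmax assigns (almost) zero probability to masked positions.

import tensorflow as tf

# Toy scores: [B, N, F, T] = [1 batch, 1 head, 2 query positions, 3 key positions]
attention_scores = tf.constant([[[[0.5, 1.0, 2.0],
                                  [0.1, 0.2, 0.3]]]])
# {1, 0} mask over key positions, [B, F, T]; the last key position is padding
attention_mask = tf.constant([[[1, 1, 0],
                               [1, 1, 0]]])
attention_mask = tf.expand_dims(attention_mask, axis=1)   # [B, 1, F, T]

negative_infinity = -10000.0   # illustrative stand-in for self.params.negative_infinity
adder = (1.0 - tf.cast(attention_mask, tf.float32)) * negative_infinity
attention_scores = tf.add(attention_scores, adder)        # masked scores become ~ -10000

attention_probs = tf.nn.softmax(attention_scores)         # masked positions get ~0 probability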
6 changes: 3 additions & 3 deletions bert/embeddings.py
@@ -71,7 +71,7 @@ def _construct(self, params: Params):
  def build(self, input_shape):
      emb_shape = input_shape
      self.input_spec = keras.layers.InputSpec(shape=emb_shape)
-     tf.assert_equal(emb_shape[-1], self.params.embedding_size)
+     tf.compat.v2.assert_equal(emb_shape[-1], self.params.embedding_size)

      # ALBERT word embeddings projection
      self.projector_layer = self.add_weight(name="projector",
@@ -86,12 +86,12 @@ def build(self, input_shape):

  def call(self, inputs, **kwargs):
      input_embedding = inputs
-     tf.assert_equal(tf.shape(input_embedding)[-1], self.params.embedding_size)
+     tf.compat.v2.assert_equal(tf.shape(input_embedding)[-1], self.params.embedding_size)

      # ALBERT: project embedding to hidden_size
      output = tf.matmul(input_embedding, self.projector_layer)
      if self.projector_bias_layer is not None:
-         output += self.projector_bias_layer
+         output = tf.add(output, self.projector_bias_layer)

      return output
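And a self-contained sketch of the embeddings-side pattern, assuming TF 2.x with Keras (the layer name, sizes, and initializers below are illustrative, not the library's actual defaults): the shape check uses tf.compat.v2.assert_equal and the bias is applied with an explicit tf.add rather than a Python in-place +=.

import tensorflow as tf
from tensorflow import keras

class ProjectorSketch(keras.layers.Layer):
    """Illustrative only: ALBERT-style projection from embedding_size to hidden_size."""

    def __init__(self, embedding_size=128, hidden_size=768, **kwargs):
        super().__init__(**kwargs)
        self.embedding_size = embedding_size
        self.hidden_size = hidden_size

    def build(self, input_shape):
        # explicit tf.compat.v2 assert, as in the commit
        tf.compat.v2.assert_equal(input_shape[-1], self.embedding_size)
        self.projector_layer = self.add_weight(
            name="projector", shape=(self.embedding_size, self.hidden_size))
        self.projector_bias_layer = self.add_weight(
            name="projector_bias", shape=(self.hidden_size,), initializer="zeros")
        super().build(input_shape)

    def call(self, inputs, **kwargs):
        output = tf.matmul(inputs, self.projector_layer)
        if self.projector_bias_layer is not None:
            # explicit tf.add instead of `output += ...`
            output = tf.add(output, self.projector_bias_layer)
        return output

# usage: project a [batch, seq_len, embedding_size] tensor to hidden_size
emb = tf.random.uniform((2, 16, 128))
out = ProjectorSketch()(emb)   # shape (2, 16, 768)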

