Commit 1a32d64
Merge pull request #19 from preinaj/save-predictions-in-evaluator
Save predictions in evaluator
pedrolarben committed Feb 10, 2022
2 parents 8681ca9 + c4fc6a1 commit 1a32d64
Showing 2 changed files with 6 additions and 3 deletions.
ADLStream/evaluation/base_evaluator.py (3 additions, 1 deletion)

@@ -96,6 +96,7 @@ def __init__(
 
         self._create_results_file()
 
+        self.visualizer = None
         if self.show_plot or self.plot_file is not None:
             self.visualizer = EvaluationVisualizer(self.dataset_name, self.ylabel)

@@ -105,7 +106,8 @@ def _create_results_file(self):
             f.write("timestamp,instances,metric\n")
 
     def start(self):
-        self.visualizer.start()
+        if self.visualizer is not None:
+            self.visualizer.start()
         if self.predictions_file:
             self.predictions_file = open(self.predictions_file, "a")
         if self.results_file:
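
The evaluator change makes the visualizer optional: EvaluationVisualizer is only constructed when show_plot or plot_file is set, so start() has to tolerate self.visualizer being None. A minimal sketch of the same pattern (stub class and argument names invented for illustration, not the actual ADLStream API):

    class Visualizer:
        def start(self):
            print("visualizer started")

    class Evaluator:
        def __init__(self, show_plot=False, plot_file=None):
            # Define the attribute unconditionally so later accesses
            # never raise AttributeError; it stays None when no
            # plotting output was requested.
            self.visualizer = None
            if show_plot or plot_file is not None:
                self.visualizer = Visualizer()

        def start(self):
            # Guard the call: without this check, an evaluator created
            # with show_plot=False would crash here.
            if self.visualizer is not None:
                self.visualizer.start()

    Evaluator(show_plot=False).start()  # no-op, no crash
    Evaluator(show_plot=True).start()   # prints "visualizer started"
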
ADLStream/models/transformer.py (3 additions, 2 deletions)

@@ -180,11 +180,12 @@ def call(self, x, enc_output, training, look_ahead_mask):
         Returns:
             tf.Tensor: Tensor with shape (batch_size, target_seq_len, d_model)
         """
+        if len(x.shape) == 4:
+            x = tf.squeeze(x, axis=[1])
 
         attn1 = self.mha1(x, x, x, look_ahead_mask)
         attn1 = self.dropout1(attn1, training=training)
         out1 = self.layernorm1(attn1 + x)
-
         attn2 = self.mha2(enc_output, enc_output, out1, None)
         attn2 = self.dropout2(attn2, training=training)
         out2 = self.layernorm2(attn2 + out1)
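
The new guard in the decoder layer handles inputs that arrive with a leading singleton dimension. A quick illustration of what tf.squeeze(x, axis=[1]) does to such a tensor (all shapes invented for the example):

    import tensorflow as tf

    # A rank-4 decoder input with an extra singleton axis at position 1.
    x = tf.zeros((32, 1, 10, 64))
    if len(x.shape) == 4:
        x = tf.squeeze(x, axis=[1])  # drop only that singleton axis
    print(x.shape)  # (32, 10, 64): the (batch, seq_len, d_model) shape
                    # the attention layers expect
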
@@ -473,6 +474,7 @@ def predict_step(self, data):
         if self.attribute != None:
             tar_inp = tf.gather(x, [self.attribute], axis=-1)
             tar_inp = tf.gather(tar_inp, [tar_inp.shape[1] - 1], axis=1)
+            tar_inp = tf.squeeze(tar_inp, axis=[1])
 
         for i in range(self.target_shape[0]):
             output = self((x, combined_mask, tar_inp), False)
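
For context, a rough shape walk-through of the predict_step change (batch size, sequence length, feature count, and attribute index are made up): the first gather keeps a single target attribute, the second keeps the last timestep, and the added tf.squeeze removes the leftover singleton time axis before the autoregressive decoding loop.

    import tensorflow as tf

    batch, seq_len, n_features = 8, 24, 5
    attribute = 2  # hypothetical index of the target attribute

    x = tf.zeros((batch, seq_len, n_features))
    tar_inp = tf.gather(x, [attribute], axis=-1)                  # (8, 24, 1)
    tar_inp = tf.gather(tar_inp, [tar_inp.shape[1] - 1], axis=1)  # (8, 1, 1)
    tar_inp = tf.squeeze(tar_inp, axis=[1])                       # (8, 1)
    print(tar_inp.shape)
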
@@ -567,7 +569,6 @@ def Transformer(
     optimizer = tf.keras.optimizers.Adam(
         learning_rate, beta_1=0.9, beta_2=0.98, epsilon=1e-9
     )
-
     att_inp = input_shape[-1]
     att_out = output_shape[-1]

