Skip to content

Commit

Permalink
Clarify --num_cores help text and namespace eval metrics by task
Browse files Browse the repository at this point in the history
  • Loading branch information
jysohn23 committed Apr 10, 2020
1 parent 1eb47c5 commit 10f5b9a
Showing 1 changed file with 2 additions and 2 deletions.
4 changes: 2 additions & 2 deletions examples/run_glue_tpu.py
Original file line number Diff line number Diff line change
Expand Up @@ -287,7 +287,7 @@ def evaluate(args, model, tokenizer, prefix="", disable_logging=False):
for key in sorted(results.keys()):
logger.info(" %s = %s", key, str(results[key]))
writer.write("%s = %s\n" % (key, str(results[key])))
tb_writer.add_scalar(key, results[key])
tb_writer.add_scalar(f"{eval_task}/{key}", results[key])

if args.metrics_debug:
# tpu-comment: Logging debug metrics for PyTorch/XLA (compile, execute times, ops, etc.)
Expand Down Expand Up @@ -528,7 +528,7 @@ def get_args():
)

# TPU Parameters
parser.add_argument("--num_cores", default=8, type=int, help="Number of TPU cores to use.")
parser.add_argument("--num_cores", default=8, type=int, help="Number of TPU cores to use (1 or 8).")
parser.add_argument("--metrics_debug", action="store_true", help="Whether to print debug metrics.")

# Other parameters
Expand Down

0 comments on commit 10f5b9a

Please sign in to comment.