From 10f5b9a208887baec64b8fee8218d286f87aa163 Mon Sep 17 00:00:00 2001
From: Jin Young Sohn
Date: Fri, 10 Apr 2020 00:46:44 +0000
Subject: [PATCH] num_cores clarification and namespace eval metrics

---
 examples/run_glue_tpu.py | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/examples/run_glue_tpu.py b/examples/run_glue_tpu.py
index 694cd78e8dcd5b..dcbb49997b79f5 100644
--- a/examples/run_glue_tpu.py
+++ b/examples/run_glue_tpu.py
@@ -287,7 +287,7 @@ def evaluate(args, model, tokenizer, prefix="", disable_logging=False):
         for key in sorted(results.keys()):
             logger.info("  %s = %s", key, str(results[key]))
             writer.write("%s = %s\n" % (key, str(results[key])))
-            tb_writer.add_scalar(key, results[key])
+            tb_writer.add_scalar(f"{eval_task}/{key}", results[key])
 
     if args.metrics_debug:
         # tpu-comment: Logging debug metrics for PyTorch/XLA (compile, execute times, ops, etc.)
@@ -528,7 +528,7 @@ def get_args():
     )
 
     # TPU Parameters
-    parser.add_argument("--num_cores", default=8, type=int, help="Number of TPU cores to use.")
+    parser.add_argument("--num_cores", default=8, type=int, help="Number of TPU cores to use (1 or 8).")
     parser.add_argument("--metrics_debug", action="store_true", help="Whether to print debug metrics.")
 
     # Other parameters
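
Note on the two changes above, with a minimal runnable sketch (not part of the patch, and assuming torch with tensorboard support is installed): prefixing the tag with the task name keeps metrics from multi-task evaluations (e.g. MNLI plus its MNLI-MM twin) from overwriting each other under a bare tag like "acc", and the "(1 or 8)" hint reflects PyTorch/XLA's multiprocessing launcher, which on an 8-core TPU only accepts spawning a single process or one per core. The `eval_task` value, the `results` dict, and the commented-out `main_fn` below are hypothetical stand-ins for the script's own variables.

    import argparse

    from torch.utils.tensorboard import SummaryWriter

    tb_writer = SummaryWriter()
    eval_task = "mnli"                    # hypothetical task name
    results = {"acc": 0.91, "f1": 0.88}   # hypothetical metric values
    for key in sorted(results.keys()):
        # Tags like "mnli/acc" group all of a task's metrics under one
        # TensorBoard section instead of colliding on a shared key.
        tb_writer.add_scalar(f"{eval_task}/{key}", results[key])
    tb_writer.close()

    parser = argparse.ArgumentParser()
    parser.add_argument("--num_cores", default=8, type=int, help="Number of TPU cores to use (1 or 8).")
    args = parser.parse_args([])
    # The launcher rejects other process counts on an 8-core TPU, hence "(1 or 8)":
    # import torch_xla.distributed.xla_multiprocessing as xmp
    # xmp.spawn(main_fn, args=(args,), nprocs=args.num_cores)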