
Commit 4e0f2a5
don't set the tpu_job_name for pre-training
clarkkev committed Mar 11, 2020
1 parent cbf6f73 commit 4e0f2a5
Showing 2 changed files with 2 additions and 2 deletions.
1 change: 1 addition & 0 deletions configure_pretraining.py
@@ -81,6 +81,7 @@ def __init__(self, model_name, data_dir, **kwargs):
     # TPU settings
     self.use_tpu = False
     self.num_tpu_cores = 1
+    self.tpu_job_name = None
     self.tpu_name = None  # cloud TPU to use for training
     self.tpu_zone = None  # GCE zone where the Cloud TPU is located in
     self.gcp_project = None  # project name for the Cloud TPU-enabled project
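The new attribute defaults to None; because __init__ accepts **kwargs, tpu_job_name can presumably be overridden like any other config setting instead of being hardcoded at the call site below.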
3 changes: 1 addition & 2 deletions run_pretraining.py
@@ -323,8 +323,7 @@ def train_or_eval(config: configure_pretraining.PretrainingConfig):
       iterations_per_loop=config.iterations_per_loop,
       num_shards=(config.num_tpu_cores if config.do_train else
                   config.num_tpu_cores),
-      tpu_job_name=("train_tpu_worker" if config.do_train else
-                    "lm_eval_tpu_worker"),
+      tpu_job_name=config.tpu_job_name,
       per_host_input_for_training=is_per_host)
   run_config = tf.estimator.tpu.RunConfig(
       cluster=tpu_cluster_resolver,
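With both pieces in place, TPUConfig receives whatever tpu_job_name the pretraining config carries; the None default lets TPUEstimator infer the TPU worker's job name instead of forcing the removed hardcoded names ("train_tpu_worker" / "lm_eval_tpu_worker"). A minimal sketch of the two usages, assuming kwargs passed to PretrainingConfig override its defaults as elsewhere in the repo; the model name, gs:// path, and job name below are placeholders, not values from the commit:

import configure_pretraining

# Default: tpu_job_name stays None, so tf.estimator.tpu.TPUConfig lets
# TPUEstimator infer the TPU worker job name on its own.
config = configure_pretraining.PretrainingConfig(
    model_name="electra_small", data_dir="gs://bucket/electra-data")
assert config.tpu_job_name is None

# Explicit override, for clusters whose TPU job uses a non-standard name
# ("my_tpu_worker" is a placeholder; assumes **kwargs set attributes).
config = configure_pretraining.PretrainingConfig(
    model_name="electra_small", data_dir="gs://bucket/electra-data",
    tpu_job_name="my_tpu_worker")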
