From 9df51d5e28036dcfbe15eb1a3e6ef2dfd5d9924c Mon Sep 17 00:00:00 2001
From: philschmid
Date: Wed, 28 Jul 2021 13:04:21 +0200
Subject: [PATCH] fixed args for patch size

---
 sagemaker/02_getting_started_tensorflow/scripts/train.py | 4 ++--
 sagemaker/05_spot_instances/scripts/train.py | 8 ++++----
 sagemaker/06_sagemaker_metrics/scripts/train.py | 8 ++++----
 .../scripts/train.py | 4 ++--
 4 files changed, 12 insertions(+), 12 deletions(-)

diff --git a/sagemaker/02_getting_started_tensorflow/scripts/train.py b/sagemaker/02_getting_started_tensorflow/scripts/train.py
index fcc5d514..53c3f574 100644
--- a/sagemaker/02_getting_started_tensorflow/scripts/train.py
+++ b/sagemaker/02_getting_started_tensorflow/scripts/train.py
@@ -14,8 +14,8 @@

     # Hyperparameters sent by the client are passed as command-line arguments to the script.
     parser.add_argument("--epochs", type=int, default=3)
-    parser.add_argument("--train-batch-size", type=int, default=16)
-    parser.add_argument("--eval-batch-size", type=int, default=8)
+    parser.add_argument("--train_batch_size", type=int, default=16)
+    parser.add_argument("--eval_batch_size", type=int, default=8)
     parser.add_argument("--model_name", type=str)
     parser.add_argument("--learning_rate", type=str, default=5e-5)
     parser.add_argument("--do_train", type=bool, default=True)
diff --git a/sagemaker/05_spot_instances/scripts/train.py b/sagemaker/05_spot_instances/scripts/train.py
index 471a66a1..131dc070 100644
--- a/sagemaker/05_spot_instances/scripts/train.py
+++ b/sagemaker/05_spot_instances/scripts/train.py
@@ -25,16 +25,16 @@

     # hyperparameters sent by the client are passed as command-line arguments to the script.
     parser.add_argument("--epochs", type=int, default=3)
-    parser.add_argument("--train-batch-size", type=int, default=32)
-    parser.add_argument("--eval-batch-size", type=int, default=64)
+    parser.add_argument("--train_batch_size", type=int, default=32)
+    parser.add_argument("--eval_batch_size", type=int, default=64)
     parser.add_argument("--warmup_steps", type=int, default=500)
     parser.add_argument("--model_name", type=str)
     parser.add_argument("--learning_rate", type=str, default=5e-5)
     parser.add_argument("--output_dir", type=str)

     # Data, model, and output directories
-    parser.add_argument("--output-data-dir", type=str, default=os.environ["SM_OUTPUT_DATA_DIR"])
-    parser.add_argument("--model-dir", type=str, default=os.environ["SM_MODEL_DIR"])
+    parser.add_argument("--output_data_dir", type=str, default=os.environ["SM_OUTPUT_DATA_DIR"])
+    parser.add_argument("--model_dir", type=str, default=os.environ["SM_MODEL_DIR"])
     parser.add_argument("--n_gpus", type=str, default=os.environ["SM_NUM_GPUS"])
     parser.add_argument("--training_dir", type=str, default=os.environ["SM_CHANNEL_TRAIN"])
     parser.add_argument("--test_dir", type=str, default=os.environ["SM_CHANNEL_TEST"])
diff --git a/sagemaker/06_sagemaker_metrics/scripts/train.py b/sagemaker/06_sagemaker_metrics/scripts/train.py
index dac07d04..4bc0add0 100644
--- a/sagemaker/06_sagemaker_metrics/scripts/train.py
+++ b/sagemaker/06_sagemaker_metrics/scripts/train.py
@@ -16,15 +16,15 @@

     # hyperparameters sent by the client are passed as command-line arguments to the script.
     parser.add_argument("--epochs", type=int, default=3)
-    parser.add_argument("--train-batch-size", type=int, default=32)
-    parser.add_argument("--eval-batch-size", type=int, default=64)
+    parser.add_argument("--train_batch_size", type=int, default=32)
+    parser.add_argument("--eval_batch_size", type=int, default=64)
     parser.add_argument("--warmup_steps", type=int, default=500)
     parser.add_argument("--model_name", type=str)
     parser.add_argument("--learning_rate", type=float, default=5e-5)

     # Data, model, and output directories
     parser.add_argument("--checkpoints", type=str, default="/opt/ml/checkpoints/")
-    parser.add_argument("--model-dir", type=str, default=os.environ["SM_MODEL_DIR"])
+    parser.add_argument("--model_dir", type=str, default=os.environ["SM_MODEL_DIR"])
     parser.add_argument("--n_gpus", type=str, default=os.environ["SM_NUM_GPUS"])
     parser.add_argument("--training_dir", type=str, default=os.environ["SM_CHANNEL_TRAIN"])
     parser.add_argument("--test_dir", type=str, default=os.environ["SM_CHANNEL_TEST"])
@@ -94,4 +94,4 @@ def compute_metrics(pred):
             writer.write(f"{key} = {value}\n")

     # Saves the model locally. In SageMaker, writing in /opt/ml/model sends it to S3
-    trainer.save_model(args.model_dir)
\ No newline at end of file
+    trainer.save_model(args.model_dir)
diff --git a/sagemaker/07_tensorflow_distributed_training_data_parallelism/scripts/train.py b/sagemaker/07_tensorflow_distributed_training_data_parallelism/scripts/train.py
index 50aaff4e..4af5fa78 100644
--- a/sagemaker/07_tensorflow_distributed_training_data_parallelism/scripts/train.py
+++ b/sagemaker/07_tensorflow_distributed_training_data_parallelism/scripts/train.py
@@ -89,8 +89,8 @@ def get_datasets():

     # Hyperparameters sent by the client are passed as command-line arguments to the script.
     parser.add_argument("--epochs", type=int, default=3)
-    parser.add_argument("--train-batch-size", type=int, default=16)
-    parser.add_argument("--eval-batch-size", type=int, default=8)
+    parser.add_argument("--train_batch_size", type=int, default=16)
+    parser.add_argument("--eval_batch_size", type=int, default=8)
     parser.add_argument("--model_name", type=str)
     parser.add_argument("--learning_rate", type=str, default=5e-5)
     parser.add_argument("--do_train", type=bool, default=True)