@@ -130,7 +130,7 @@ def _tune(
     hyperparameter_ranges=None,
     job_name=None,
     warm_start_config=None,
-    wait_till_terminal=True,
+    wait=True,
     max_jobs=2,
     max_parallel_jobs=2,
     early_stopping_type="Off",
@@ -155,7 +155,7 @@ def _tune(
     tuner.fit([records, test_record_set], job_name=job_name)
     print("Started hyperparameter tuning job with name:" + tuner.latest_tuning_job.name)
 
-    if wait_till_terminal:
+    if wait:
         tuner.wait()
 
     return tuner
@@ -388,7 +388,7 @@ def test_tuning_kmeans_identical_dataset_algorithm_tuner_from_non_terminal_paren
         kmeans_train_set,
         job_name=parent_tuning_job_name,
         hyperparameter_ranges=hyperparameter_ranges,
-        wait_till_terminal=False,
+        wait=False,
         max_parallel_jobs=1,
         max_jobs=1,
     )
@@ -453,15 +453,9 @@ def test_tuning_lda(sagemaker_session, cpu_instance_type):
     )
 
     tuning_job_name = unique_name_from_base("test-lda", max_length=32)
+    print("Started hyperparameter tuning job with name:" + tuning_job_name)
     tuner.fit([record_set, test_record_set], mini_batch_size=1, job_name=tuning_job_name)
 
-    latest_tuning_job_name = tuner.latest_tuning_job.name
-
-    print("Started hyperparameter tuning job with name:" + latest_tuning_job_name)
-
-    time.sleep(15)
-    tuner.wait()
-
     attached_tuner = HyperparameterTuner.attach(
         tuning_job_name, sagemaker_session=sagemaker_session
     )
@@ -575,12 +569,8 @@ def test_tuning_mxnet(
     )
 
     tuning_job_name = unique_name_from_base("tune-mxnet", max_length=32)
-    tuner.fit({"train": train_input, "test": test_input}, job_name=tuning_job_name)
-
     print("Started hyperparameter tuning job with name:" + tuning_job_name)
-
-    time.sleep(15)
-    tuner.wait()
+    tuner.fit({"train": train_input, "test": test_input}, job_name=tuning_job_name)
 
     best_training_job = tuner.best_training_job()
     with timeout_and_delete_endpoint_by_name(best_training_job, sagemaker_session):
@@ -628,12 +618,8 @@ def test_tuning_tf(
     )
 
     tuning_job_name = unique_name_from_base("tune-tf", max_length=32)
-    tuner.fit(inputs, job_name=tuning_job_name)
-
     print("Started hyperparameter tuning job with name: " + tuning_job_name)
-
-    time.sleep(15)
-    tuner.wait()
+    tuner.fit(inputs, job_name=tuning_job_name)
 
 
 def test_tuning_tf_vpc_multi(
@@ -686,12 +672,8 @@ def test_tuning_tf_vpc_multi(
     )
 
     tuning_job_name = unique_name_from_base("tune-tf", max_length=32)
-    tuner.fit(inputs, job_name=tuning_job_name)
-
     print(f"Started hyperparameter tuning job with name: {tuning_job_name}")
-
-    time.sleep(15)
-    tuner.wait()
+    tuner.fit(inputs, job_name=tuning_job_name)
 
 
 @pytest.mark.canary_quick
@@ -740,13 +722,9 @@ def test_tuning_chainer(
     )
 
     tuning_job_name = unique_name_from_base("chainer", max_length=32)
+    print("Started hyperparameter tuning job with name: {}".format(tuning_job_name))
     tuner.fit({"train": train_input, "test": test_input}, job_name=tuning_job_name)
 
-    print("Started hyperparameter tuning job with name:" + tuning_job_name)
-
-    time.sleep(15)
-    tuner.wait()
-
     best_training_job = tuner.best_training_job()
     with timeout_and_delete_endpoint_by_name(best_training_job, sagemaker_session):
         predictor = tuner.deploy(1, cpu_instance_type)
@@ -812,13 +790,9 @@ def test_attach_tuning_pytorch(
     )
 
     tuning_job_name = unique_name_from_base("pytorch", max_length=32)
+    print("Started hyperparameter tuning job with name: {}".format(tuning_job_name))
     tuner.fit({"training": training_data}, job_name=tuning_job_name)
 
-    print("Started hyperparameter tuning job with name:" + tuning_job_name)
-
-    time.sleep(15)
-    tuner.wait()
-
     endpoint_name = tuning_job_name
     model_name = "model-name-1"
     attached_tuner = HyperparameterTuner.attach(
@@ -887,17 +861,14 @@ def test_tuning_byo_estimator(sagemaker_session, cpu_instance_type):
         max_parallel_jobs=2,
     )
 
+    tuning_job_name = unique_name_from_base("byo", 32)
+    print("Started hyperparameter tuning job with name: {}".format(tuning_job_name))
     tuner.fit(
         {"train": s3_train_data, "test": s3_train_data},
         include_cls_metadata=False,
-        job_name=unique_name_from_base("byo", 32),
+        job_name=tuning_job_name,
     )
 
-    print("Started hyperparameter tuning job with name:" + tuner.latest_tuning_job.name)
-
-    time.sleep(15)
-    tuner.wait()
-
     best_training_job = tuner.best_training_job()
     with timeout_and_delete_endpoint_by_name(best_training_job, sagemaker_session):
         predictor = tuner.deploy(1, cpu_instance_type, endpoint_name=best_training_job)
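Every hunk after the rename applies the same cleanup: pick the job name up front, log it before fit rather than after, and drop the manual time.sleep(15) / tuner.wait() polling. The tests can call best_training_job() immediately after fit because fit blocks here until the job finishes (in recent sagemaker releases its wait parameter defaults to True). A condensed before/after sketch, with channel and variable names as illustrative assumptions:

    # before: start the job, then poll it to completion by hand
    tuner.fit({"train": train_input}, job_name=tuning_job_name)
    time.sleep(15)
    tuner.wait()

    # after: fit waits by default, so the job is already terminal when it returns
    print("Started hyperparameter tuning job with name: {}".format(tuning_job_name))
    tuner.fit({"train": train_input}, job_name=tuning_job_name)
    best_training_job = tuner.best_training_job()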