49 commits
e520c3c
fix: add environment key to model container only if it is not empty
Nov 4, 2022
263b87d
Accommodate now-missing container environment map in unit tests
Nov 30, 2022
c97c467
fix: type hint of PySparkProcessor __init__ (#3297)
NivekNey Dec 2, 2022
de58941
fix: fix PySparkProcessor __init__ params type (#3354)
andre-marcos-perez Dec 2, 2022
41dd330
fix: Allow Py 3.7 for MMS Test Docker env (#3080)
shreyapandit Dec 2, 2022
1e23a3f
refactoring: using with statement (#3286)
maldil Dec 2, 2022
19efadf
Update local_requirements.txt PyYAML version (#3095)
shreyapandit Dec 2, 2022
76f7782
feature: Update TF 2.9 and TF 2.10 inference DLCs (#3465)
arjkesh Dec 2, 2022
fde0738
feature: Added transform with monitoring pipeline step in transformer…
keshav-chandak Dec 2, 2022
7f9f3b0
fix: Fix bug forcing uploaded tar to be named sourcedir (#3412)
claytonparnell Dec 2, 2022
5d59767
feature: Add Code Owners file (#3503)
navinsoni Dec 2, 2022
0f5cf18
prepare release v2.119.0
Dec 3, 2022
f1f0013
update development version to v2.119.1.dev0
Dec 3, 2022
b7512bc
Attempt to fix multidatamodel.py "Environment" KeyError
Dec 5, 2022
cdd4ac7
Merge remote-tracking branch 'origin/master'
Dec 5, 2022
bb4b689
feature: Add DXB region to frameworks by DLC (#3387)
RadhikaB-97 Dec 5, 2022
b68bcd9
fix: support idempotency for framework and spark processors (#3460)
brockwade633 Dec 5, 2022
32969da
feature: Update registries with new region account number mappings. (…
kenny-ezirim Dec 6, 2022
767da0a
feature: Adding support for SageMaker Training Compiler in PyTorch es…
Lokiiiiii Dec 7, 2022
d779d1b
feature: Add Neo image uri config for Pytorch 1.12 (#3507)
HappyAmazonian Dec 7, 2022
83327fb
prepare release v2.120.0
Dec 7, 2022
5bffb04
update development version to v2.120.1.dev0
Dec 7, 2022
b828396
feature: Algorithms Region Expansion OSU/DXB (#3508)
malav-shastri Dec 7, 2022
357f732
fix: Add constraints file for apache-airflow (#3510)
navinsoni Dec 7, 2022
a28d1dd
fix: FrameworkProcessor S3 uploads (#3493)
brockwade633 Dec 8, 2022
11d2475
prepare release v2.121.0
Dec 8, 2022
24171b5
update development version to v2.121.1.dev0
Dec 8, 2022
d5847d5
Fix: Differentiate SageMaker Training Compiler's PT DLCs from base PT…
Lokiiiiii Dec 8, 2022
3f6ea88
fix: Fix failing jumpstart cache unit tests (#3514)
evakravi Dec 8, 2022
4570aa6
fix: Pop out ModelPackageName from pipeline definition (#3472)
qidewenwhen Dec 9, 2022
959ea1a
prepare release v2.121.1
Dec 9, 2022
b2e8b66
update development version to v2.121.2.dev0
Dec 9, 2022
355975d
fix: Skip Bad Transform Test (#3521)
amzn-choeric Dec 9, 2022
fadc817
fix: Revert "fix: type hint of PySparkProcessor __init__" (#3524)
mufaddal-rohawala Dec 9, 2022
c5fc93f
change: Update for Tensorflow Serving 2.11 inference DLCs (#3509)
hballuru Dec 9, 2022
ec8da98
prepare release v2.121.2
Dec 12, 2022
0352122
update development version to v2.121.3.dev0
Dec 12, 2022
d6c0214
feature: Add OSU region to frameworks for DLC (#3532)
kace Dec 12, 2022
5af4feb
fix: Remove content type image/jpg from analysis configuration schema…
xgchena Dec 12, 2022
4389847
fix: unpin packaging version (#3533)
claytonparnell Dec 13, 2022
a3efddf
fix: the Hyperband support fix for the HPO (#3516)
repushko Dec 13, 2022
bd96ec5
feature: Feature Store dataset builder, delete_record, get_record, li…
mizanfiu Dec 14, 2022
fb3880f
prepare release v2.122.0
Dec 14, 2022
a584ea5
update development version to v2.122.1.dev0
Dec 14, 2022
8df713d
Merge branch 'master' into master
l3ku Dec 14, 2022
ae15a2d
Merge remote-tracking branch 'origin/master'
Dec 22, 2022
58c44bf
Merge remote-tracking branch 'origin/master'
Dec 28, 2022
6571a88
Merge remote-tracking branch 'origin/master'
Jan 10, 2023
c2201b7
Merge remote-tracking branch 'origin/master'
Jan 14, 2023
2 changes: 1 addition & 1 deletion CHANGELOG.md
@@ -977,7 +977,7 @@
* default repack encryption
* support large pipeline
* add support for pytorch 1.10.0

### Documentation Changes

* SageMaker model parallel library 1.6.0 API doc
3 changes: 3 additions & 0 deletions src/sagemaker/image_uri_config/autogluon.json
@@ -489,12 +489,15 @@
"cn-north-1": "727897471807",
"cn-northwest-1": "727897471807",
"eu-central-1": "763104351884",
"eu-central-2": "380420809688",
"eu-north-1": "763104351884",
"eu-west-1": "763104351884",
"eu-west-2": "763104351884",
"eu-west-3": "763104351884",
"eu-south-1": "692866216735",
"eu-south-2": "503227376785",
"me-south-1": "217643126080",
"me-central-1": "914824155844",
"sa-east-1": "763104351884",
"us-east-1": "763104351884",
"us-east-2": "763104351884",
11 changes: 4 additions & 7 deletions src/sagemaker/local/entities.py
@@ -390,7 +390,7 @@ def _get_container_environment(self, **kwargs):
container
"""
environment = {}
environment.update(self.primary_container["Environment"])
environment.update(self.primary_container.get("Environment", {}))
environment["SAGEMAKER_BATCH"] = "True"
if "MaxPayloadInMB" in kwargs:
environment["SAGEMAKER_MAX_PAYLOAD_IN_MB"] = str(kwargs["MaxPayloadInMB"])
@@ -591,18 +591,15 @@ def serve(self):
instance_count = self.production_variant["InitialInstanceCount"]

accelerator_type = self.production_variant.get("AcceleratorType")
environment = self.primary_container.get("Environment", {})
if accelerator_type == "local_sagemaker_notebook":
self.primary_container["Environment"][
"SAGEMAKER_INFERENCE_ACCELERATOR_PRESENT"
] = "true"
environment["SAGEMAKER_INFERENCE_ACCELERATOR_PRESENT"] = "true"

self.create_time = datetime.datetime.now()
self.container = _SageMakerContainer(
instance_type, instance_count, image, self.local_session
)
self.container.serve(
self.primary_container["ModelDataUrl"], self.primary_container["Environment"]
)
self.container.serve(self.primary_container["ModelDataUrl"], environment)

serving_port = get_config_value("local.serving_port", self.local_session.config) or 8080
_wait_for_serving_container(serving_port)
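With container definitions no longer guaranteed to carry an "Environment" key, the local serve() path now reads the map through dict.get, mutates the local copy, and hands that copy to container.serve() instead of indexing into self.primary_container["Environment"]. A minimal sketch of the new access pattern (the container dict below is an illustrative example, not taken from the diff):

    # Hypothetical primary container built without an env, as container_def() now produces:
    primary_container = {"Image": "my-image", "ModelDataUrl": "s3://bucket/model.tar.gz"}

    # Old pattern: primary_container["Environment"] raised KeyError here.
    environment = primary_container.get("Environment", {})
    accelerator_type = "local_sagemaker_notebook"  # example value
    if accelerator_type == "local_sagemaker_notebook":
        environment["SAGEMAKER_INFERENCE_ACCELERATOR_PRESENT"] = "true"
    # environment == {"SAGEMAKER_INFERENCE_ACCELERATOR_PRESENT": "true"}, passed on to container.serve()

When the key is absent, the accelerator flag lands on a fresh local dict that is passed to the container, rather than being written back into self.primary_container.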
2 changes: 1 addition & 1 deletion src/sagemaker/multidatamodel.py
@@ -139,7 +139,7 @@ def prepare_container_def(
if self.model:
container_definition = self.model.prepare_container_def(instance_type, accelerator_type)
image_uri = container_definition["Image"]
environment = container_definition["Environment"]
environment = container_definition.get("Environment", {})
else:
image_uri = self.image_uri
environment = self.env
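The same defensive read addresses the "Environment" KeyError that commit b7512bc targets: when the wrapped model's container definition omits the key (the case created by the session.py change below), prepare_container_def now substitutes an empty dict. Illustration, assuming a container definition shaped like the new container_def() output:

    container_definition = {"Image": "my-image", "ModelDataUrl": "s3://bucket/model.tar.gz"}
    # Old: container_definition["Environment"] -> KeyError when the key is missing.
    environment = container_definition.get("Environment", {})  # now {} instead of an exception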
6 changes: 3 additions & 3 deletions src/sagemaker/session.py
@@ -5153,9 +5153,9 @@ def container_def(image_uri, model_data_url=None, env=None, container_mode=None,
dict[str, str]: A complete container definition object usable with the CreateModel API if
passed via `PrimaryContainers` field.
"""
if env is None:
env = {}
c_def = {"Image": image_uri, "Environment": env}
c_def = {"Image": image_uri}
if env:
c_def["Environment"] = env
if model_data_url:
c_def["ModelDataUrl"] = model_data_url
if container_mode:
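This is the root change: container_def() now includes "Environment" only when a non-empty env mapping is supplied, which is why the expected container definitions in the test diffs below all drop their "Environment": {} entries. A simplified sketch of the new behaviour (the real helper in src/sagemaker/session.py also accepts container_mode and other fields not shown here):

    def container_def(image_uri, model_data_url=None, env=None):
        # "Environment" is emitted only for a non-empty env dict.
        c_def = {"Image": image_uri}
        if env:
            c_def["Environment"] = env
        if model_data_url:
            c_def["ModelDataUrl"] = model_data_url
        return c_def

    assert container_def("my-image") == {"Image": "my-image"}
    assert container_def("my-image", env={"SAGEMAKER_PROGRAM": "inference.py"}) == {
        "Image": "my-image",
        "Environment": {"SAGEMAKER_PROGRAM": "inference.py"},
    }

Callers that previously indexed "Environment" unconditionally need the dict.get fallback used in the two library diffs above.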
3 changes: 1 addition & 2 deletions tests/unit/sagemaker/model/test_model.py
@@ -134,7 +134,7 @@ def test_prepare_container_def_with_model_data():
model = Model(MODEL_IMAGE)
container_def = model.prepare_container_def(INSTANCE_TYPE, "ml.eia.medium")

expected = {"Image": MODEL_IMAGE, "Environment": {}}
expected = {"Image": MODEL_IMAGE}
assert expected == container_def


@@ -158,7 +158,6 @@ def test_prepare_container_def_with_image_config():
expected = {
"Image": MODEL_IMAGE,
"ImageConfig": {"RepositoryAccessMode": "Vpc"},
"Environment": {},
}

container_def = model.prepare_container_def()
3 changes: 1 addition & 2 deletions tests/unit/sagemaker/tensorflow/test_tfs.py
@@ -89,7 +89,7 @@ def test_tfs_model(retrieve_image_uri, sagemaker_session, tensorflow_inference_v
serverless_inference_config=None,
)
assert IMAGE == cdef["Image"]
assert {} == cdef["Environment"]
assert cdef.get("Environment") is None

predictor = model.deploy(INSTANCE_COUNT, INSTANCE_TYPE)
assert isinstance(predictor, TensorFlowPredictor)
@@ -485,7 +485,6 @@ def test_register_tfs_model_auto_infer_framework(sagemaker_session, tensorflow_i
"containers": [
{
"Image": image_uri,
"Environment": ANY,
"ModelDataUrl": ANY,
"Framework": "TENSORFLOW",
"FrameworkVersion": tensorflow_inference_version,
5 changes: 0 additions & 5 deletions tests/unit/sagemaker/workflow/test_airflow.py
@@ -1014,7 +1014,6 @@ def test_amazon_alg_model_config(sagemaker_session):
"ModelName": "pca-%s" % TIME_STAMP,
"PrimaryContainer": {
"Image": "174872318107.dkr.ecr.us-west-2.amazonaws.com/pca:1",
"Environment": {},
"ModelDataUrl": "{{ model_data }}",
},
"ExecutionRoleArn": "{{ role }}",
@@ -1108,7 +1107,6 @@ def test_model_config_from_amazon_alg_estimator(sagemaker_session):
"ModelName": "knn-%s" % TIME_STAMP,
"PrimaryContainer": {
"Image": "174872318107.dkr.ecr.us-west-2.amazonaws.com/knn:1",
"Environment": {},
"ModelDataUrl": "s3://output/{{ ti.xcom_pull(task_ids='task_id')['Tuning']['BestTrainingJob']"
"['TrainingJobName'] }}/output/model.tar.gz",
},
@@ -1309,7 +1307,6 @@ def test_transform_config_from_amazon_alg_estimator(sagemaker_session):
"ModelName": "knn-%s" % TIME_STAMP,
"PrimaryContainer": {
"Image": "174872318107.dkr.ecr.us-west-2.amazonaws.com/knn:1",
"Environment": {},
"ModelDataUrl": "s3://output/{{ ti.xcom_pull(task_ids='task_id')['Training']['TrainingJobName'] }}"
"/output/model.tar.gz",
},
@@ -1413,7 +1410,6 @@ def test_deploy_amazon_alg_model_config(sagemaker_session):
"ModelName": "pca-%s" % TIME_STAMP,
"PrimaryContainer": {
"Image": "174872318107.dkr.ecr.us-west-2.amazonaws.com/pca:1",
"Environment": {},
"ModelDataUrl": "{{ model_data }}",
},
"ExecutionRoleArn": "{{ role }}",
@@ -1549,7 +1545,6 @@ def test_deploy_config_from_amazon_alg_estimator(sagemaker_session):
"ModelName": "knn-%s" % TIME_STAMP,
"PrimaryContainer": {
"Image": "174872318107.dkr.ecr.us-west-2.amazonaws.com/knn:1",
"Environment": {},
"ModelDataUrl": "s3://output/{{ ti.xcom_pull(task_ids='task_id')['Tuning']['BestTrainingJob']"
"['TrainingJobName'] }}/output/model.tar.gz",
},
4 changes: 2 additions & 2 deletions tests/unit/sagemaker/workflow/test_model_step.py
@@ -621,7 +621,7 @@ def test_conditional_model_create_and_regis(
container = arguments["PrimaryContainer"]
assert container["Image"] == _IMAGE_URI
assert container["ModelDataUrl"] == {"Get": "Parameters.ModelData"}
assert not container.get("Environment", {})
assert container.get("Environment") is None
else:
raise Exception("A step exists in the collection of an invalid type.")
adjacency_list = PipelineGraph.from_pipeline(pipeline).adjacency_list
@@ -913,7 +913,7 @@ def _verify_register_model_container_definition(
containers = request["InferenceSpecification"]["Containers"]
assert len(containers) == 1
isinstance(containers[0].pop("ModelDataUrl"), expected_model_data_type)
container_env = containers[0]["Environment"]
container_env = containers[0].get("Environment", {})
assert container_env.pop(_SAGEMAKER_PROGRAM, None) == expected_program
submit_dir = container_env.pop(_SAGEMAKER_SUBMIT_DIRECTORY, None)
if submit_dir and not submit_dir.startswith("s3://"):
2 changes: 0 additions & 2 deletions tests/unit/sagemaker/workflow/test_step_collections.py
@@ -1176,7 +1176,6 @@ def test_estimator_transformer(estimator):
"Arguments": {
"ExecutionRoleArn": "DummyRole",
"PrimaryContainer": {
"Environment": {},
"Image": "fakeimage",
"ModelDataUrl": "s3://my-bucket/model.tar.gz",
},
@@ -1292,7 +1291,6 @@ def test_estimator_transformer_with_model_repack_with_estimator(estimator):
assert arguments == {
"ExecutionRoleArn": "DummyRole",
"PrimaryContainer": {
"Environment": {},
"Image": "fakeimage",
},
}
2 changes: 1 addition & 1 deletion tests/unit/sagemaker/workflow/test_steps.py
@@ -803,7 +803,7 @@ def test_create_model_step(sagemaker_session):
"DependsOn": ["TestStep", "SecondTestStep"],
"Arguments": {
"ExecutionRoleArn": "DummyRole",
"PrimaryContainer": {"Environment": {}, "Image": "fakeimage"},
"PrimaryContainer": {"Image": "fakeimage"},
},
}
assert step.properties.ModelName.expr == {"Get": "Steps.MyCreateModelStep.ModelName"}
1 change: 1 addition & 0 deletions tests/unit/sagemaker/workflow/test_training_step.py
@@ -778,6 +778,7 @@ def test_training_step_with_algorithm_base_local_code(
# test idempotency
step_def2 = json.loads(pipeline.definition())["Steps"][0]
del step_def2["Arguments"]["InputDataConfig"][0]["DataSource"]["S3DataSource"]["S3Uri"]

assert step_def == step_def2


4 changes: 2 additions & 2 deletions tests/unit/test_create_deploy_entities.py
@@ -23,7 +23,7 @@
ROLE = "myimrole"
EXPANDED_ROLE = "arn:aws:iam::111111111111:role/ExpandedRole"
IMAGE = "myimage"
FULL_CONTAINER_DEF = {"Environment": {}, "Image": IMAGE, "ModelDataUrl": "s3://mybucket/mymodel"}
FULL_CONTAINER_DEF = {"Image": IMAGE, "ModelDataUrl": "s3://mybucket/mymodel"}
VPC_CONFIG = {"Subnets": ["subnet-foo"], "SecurityGroups": ["sg-foo"]}
INITIAL_INSTANCE_COUNT = 1
INSTANCE_TYPE = "ml.c4.xlarge"
@@ -57,7 +57,7 @@ def test_create_model_expand_primary_container(sagemaker_session):
sagemaker_session.create_model(name=MODEL_NAME, role=ROLE, container_defs=IMAGE)

_1, _2, create_model_kwargs = sagemaker_session.sagemaker_client.create_model.mock_calls[0]
assert create_model_kwargs["PrimaryContainer"] == {"Environment": {}, "Image": IMAGE}
assert create_model_kwargs["PrimaryContainer"] == {"Image": IMAGE}


def test_create_endpoint_config(sagemaker_session):
2 changes: 1 addition & 1 deletion tests/unit/test_endpoint_from_model_data.py
@@ -25,7 +25,7 @@
ACCELERATOR_TYPE = "ml.eia.medium"
S3_MODEL_ARTIFACTS = "s3://mybucket/mymodel"
DEPLOY_IMAGE = "mydeployimage"
CONTAINER_DEF = {"Environment": {}, "Image": DEPLOY_IMAGE, "ModelDataUrl": S3_MODEL_ARTIFACTS}
CONTAINER_DEF = {"Image": DEPLOY_IMAGE, "ModelDataUrl": S3_MODEL_ARTIFACTS}
VPC_CONFIG = {"Subnets": ["foo"], "SecurityGroupIds": ["bar"]}
DEPLOY_ROLE = "mydeployrole"
ENV_VARS = {"PYTHONUNBUFFERED": "TRUE", "some": "nonsense"}
2 changes: 0 additions & 2 deletions tests/unit/test_estimator.py
@@ -2780,7 +2780,6 @@ def test_fit_deploy_tags_in_estimator(name_from_base, sagemaker_session):
role="DummyRole",
container_defs={
"ModelDataUrl": "s3://bucket/model.tar.gz",
"Environment": {},
"Image": "fakeimage",
},
enable_network_isolation=False,
@@ -2834,7 +2833,6 @@ def test_fit_deploy_tags(name_from_base, sagemaker_session):
role="DummyRole",
container_defs={
"ModelDataUrl": "s3://bucket/model.tar.gz",
"Environment": {},
"Image": "fakeimage",
},
enable_network_isolation=False,
1 change: 0 additions & 1 deletion tests/unit/test_pipeline_model.py
@@ -339,7 +339,6 @@ def test_network_isolation(tfo, time, sagemaker_session):
},
{
"Image": "246618743249.dkr.ecr.us-west-2.amazonaws.com/sagemaker-sparkml-serving:3.3",
"Environment": {},
"ModelDataUrl": "s3://bucket/model_2.tar.gz",
},
],
1 change: 0 additions & 1 deletion tests/unit/test_session.py
@@ -1753,7 +1753,6 @@ def test_logs_for_transform_job_full_lifecycle(time, cw, sagemaker_session_full_

MODEL_NAME = "some-model"
PRIMARY_CONTAINER = {
"Environment": {},
"Image": IMAGE,
"ModelDataUrl": "s3://sagemaker-123/output/jobname/model/model.tar.gz",
}