Skip to content

Commit e7b9bee

Browse files
authored
Merge branch 'aws:master' into asyncWaiterTimeoutHandle
2 parents f716189 + 8d56659 commit e7b9bee

File tree

8 files changed

+160
-43
lines changed

8 files changed

+160
-43
lines changed

src/sagemaker/image_uri_config/autogluon.json

Lines changed: 88 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -12,7 +12,8 @@
1212
"0.7": "0.7.0",
1313
"0.8": "0.8.2",
1414
"1.0": "1.0.0",
15-
"1.1": "1.1.1"
15+
"1.1": "1.1.1",
16+
"1.2": "1.2.0"
1617
},
1718
"versions": {
1819
"0.3.1": {
@@ -563,6 +564,47 @@
563564
"py_versions": [
564565
"py311"
565566
]
567+
},
568+
"1.2.0": {
569+
"registries": {
570+
"af-south-1": "626614931356",
571+
"il-central-1": "780543022126",
572+
"ap-east-1": "871362719292",
573+
"ap-northeast-1": "763104351884",
574+
"ap-northeast-2": "763104351884",
575+
"ap-northeast-3": "364406365360",
576+
"ap-south-1": "763104351884",
577+
"ap-southeast-1": "763104351884",
578+
"ap-southeast-2": "763104351884",
579+
"ap-southeast-3": "907027046896",
580+
"ap-southeast-4": "457447274322",
581+
"ca-central-1": "763104351884",
582+
"eu-central-1": "763104351884",
583+
"eu-north-1": "763104351884",
584+
"eu-west-1": "763104351884",
585+
"eu-west-2": "763104351884",
586+
"eu-west-3": "763104351884",
587+
"eu-south-1": "692866216735",
588+
"me-south-1": "217643126080",
589+
"sa-east-1": "763104351884",
590+
"us-east-1": "763104351884",
591+
"us-east-2": "763104351884",
592+
"us-gov-east-1": "446045086412",
593+
"us-gov-west-1": "442386744353",
594+
"us-iso-east-1": "886529160074",
595+
"us-isob-east-1": "094389454867",
596+
"us-west-1": "763104351884",
597+
"us-west-2": "763104351884",
598+
"ca-west-1": "204538143572"
599+
},
600+
"repository": "autogluon-training",
601+
"processors": [
602+
"cpu",
603+
"gpu"
604+
],
605+
"py_versions": [
606+
"py311"
607+
]
566608
}
567609
}
568610
},
@@ -575,7 +617,8 @@
575617
"0.7": "0.7.0",
576618
"0.8": "0.8.2",
577619
"1.0": "1.0.0",
578-
"1.1": "1.1.1"
620+
"1.1": "1.1.1",
621+
"1.2": "1.2.0"
579622
},
580623
"versions": {
581624
"0.3.1": {
@@ -1157,6 +1200,49 @@
11571200
"py_versions": [
11581201
"py311"
11591202
]
1203+
},
1204+
"1.2.0": {
1205+
"registries": {
1206+
"af-south-1": "626614931356",
1207+
"il-central-1": "780543022126",
1208+
"ap-east-1": "871362719292",
1209+
"ap-northeast-1": "763104351884",
1210+
"ap-northeast-2": "763104351884",
1211+
"ap-northeast-3": "364406365360",
1212+
"ap-south-1": "763104351884",
1213+
"ap-southeast-1": "763104351884",
1214+
"ap-southeast-2": "763104351884",
1215+
"ap-southeast-3": "907027046896",
1216+
"ap-southeast-4": "457447274322",
1217+
"ca-central-1": "763104351884",
1218+
"cn-north-1": "727897471807",
1219+
"cn-northwest-1": "727897471807",
1220+
"eu-central-1": "763104351884",
1221+
"eu-north-1": "763104351884",
1222+
"eu-west-1": "763104351884",
1223+
"eu-west-2": "763104351884",
1224+
"eu-west-3": "763104351884",
1225+
"eu-south-1": "692866216735",
1226+
"me-south-1": "217643126080",
1227+
"sa-east-1": "763104351884",
1228+
"us-east-1": "763104351884",
1229+
"us-east-2": "763104351884",
1230+
"us-gov-east-1": "446045086412",
1231+
"us-gov-west-1": "442386744353",
1232+
"us-iso-east-1": "886529160074",
1233+
"us-isob-east-1": "094389454867",
1234+
"us-west-1": "763104351884",
1235+
"us-west-2": "763104351884",
1236+
"ca-west-1": "204538143572"
1237+
},
1238+
"repository": "autogluon-inference",
1239+
"processors": [
1240+
"cpu",
1241+
"gpu"
1242+
],
1243+
"py_versions": [
1244+
"py311"
1245+
]
11601246
}
11611247
}
11621248
}

src/sagemaker/serve/model_server/multi_model_server/inference.py

Lines changed: 16 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -21,7 +21,7 @@
2121
METADATA_PATH = Path(__file__).parent.joinpath("metadata.json")
2222

2323

24-
def model_fn(model_dir):
24+
def model_fn(model_dir, context=None):
2525
"""Overrides default method for loading a model"""
2626
shared_libs_path = Path(model_dir + "/shared_libs")
2727

@@ -40,16 +40,26 @@ def model_fn(model_dir):
4040
return partial(inference_spec.invoke, model=inference_spec.load(model_dir))
4141

4242

43-
def input_fn(input_data, content_type):
43+
def input_fn(input_data, content_type, context=None):
4444
"""Deserializes the bytes that were received from the model server"""
4545
try:
4646
if hasattr(schema_builder, "custom_input_translator"):
4747
deserialized_data = schema_builder.custom_input_translator.deserialize(
48-
io.BytesIO(input_data) if type(input_data)== bytes else io.BytesIO(input_data.encode('utf-8')), content_type
48+
(
49+
io.BytesIO(input_data)
50+
if type(input_data) == bytes
51+
else io.BytesIO(input_data.encode("utf-8"))
52+
),
53+
content_type,
4954
)
5055
else:
5156
deserialized_data = schema_builder.input_deserializer.deserialize(
52-
io.BytesIO(input_data) if type(input_data)== bytes else io.BytesIO(input_data.encode('utf-8')), content_type[0]
57+
(
58+
io.BytesIO(input_data)
59+
if type(input_data) == bytes
60+
else io.BytesIO(input_data.encode("utf-8"))
61+
),
62+
content_type[0],
5363
)
5464

5565
# Check if preprocess method is defined and call it
@@ -62,12 +72,12 @@ def input_fn(input_data, content_type):
6272
raise Exception("Encountered error in deserialize_request.") from e
6373

6474

65-
def predict_fn(input_data, predict_callable):
75+
def predict_fn(input_data, predict_callable, context=None):
6676
"""Invokes the model that is taken in by model server"""
6777
return predict_callable(input_data)
6878

6979

70-
def output_fn(predictions, accept_type):
80+
def output_fn(predictions, accept_type, context=None):
7181
"""Prediction is serialized to bytes and sent back to the customer"""
7282
try:
7383
if hasattr(inference_spec, "postprocess"):

src/sagemaker/serve/model_server/torchserve/inference.py

Lines changed: 12 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -67,11 +67,21 @@ def input_fn(input_data, content_type):
6767
try:
6868
if hasattr(schema_builder, "custom_input_translator"):
6969
deserialized_data = schema_builder.custom_input_translator.deserialize(
70-
io.BytesIO(input_data) if type(input_data)== bytes else io.BytesIO(input_data.encode('utf-8')), content_type
70+
(
71+
io.BytesIO(input_data)
72+
if type(input_data) == bytes
73+
else io.BytesIO(input_data.encode("utf-8"))
74+
),
75+
content_type,
7176
)
7277
else:
7378
deserialized_data = schema_builder.input_deserializer.deserialize(
74-
io.BytesIO(input_data) if type(input_data)== bytes else io.BytesIO(input_data.encode('utf-8')), content_type[0]
79+
(
80+
io.BytesIO(input_data)
81+
if type(input_data) == bytes
82+
else io.BytesIO(input_data.encode("utf-8"))
83+
),
84+
content_type[0],
7585
)
7686

7787
# Check if preprocess method is defined and call it

src/sagemaker/serve/model_server/torchserve/xgboost_inference.py

Lines changed: 12 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -70,11 +70,21 @@ def input_fn(input_data, content_type):
7070
try:
7171
if hasattr(schema_builder, "custom_input_translator"):
7272
return schema_builder.custom_input_translator.deserialize(
73-
io.BytesIO(input_data) if type(input_data)== bytes else io.BytesIO(input_data.encode('utf-8')), content_type
73+
(
74+
io.BytesIO(input_data)
75+
if type(input_data) == bytes
76+
else io.BytesIO(input_data.encode("utf-8"))
77+
),
78+
content_type,
7479
)
7580
else:
7681
return schema_builder.input_deserializer.deserialize(
77-
io.BytesIO(input_data) if type(input_data)== bytes else io.BytesIO(input_data.encode('utf-8')), content_type[0]
82+
(
83+
io.BytesIO(input_data)
84+
if type(input_data) == bytes
85+
else io.BytesIO(input_data.encode("utf-8"))
86+
),
87+
content_type[0],
7888
)
7989
except Exception as e:
8090
raise Exception("Encountered error in deserialize_request.") from e

src/sagemaker/workflow/utilities.py

Lines changed: 27 additions & 27 deletions
Original file line numberDiff line numberDiff line change
@@ -268,29 +268,29 @@ def get_config_hash(step: Entity):
268268

269269

270270
def hash_object(obj) -> str:
271-
"""Get the MD5 hash of an object.
271+
"""Get the SHA256 hash of an object.
272272
273273
Args:
274274
obj (dict): The object
275275
Returns:
276-
str: The MD5 hash of the object
276+
str: The SHA256 hash of the object
277277
"""
278-
return hashlib.md5(str(obj).encode()).hexdigest()
278+
return hashlib.sha256(str(obj).encode()).hexdigest()
279279

280280

281281
def hash_file(path: str) -> str:
282-
"""Get the MD5 hash of a file.
282+
"""Get the SHA256 hash of a file.
283283
284284
Args:
285285
path (str): The local path for the file.
286286
Returns:
287-
str: The MD5 hash of the file.
287+
str: The SHA256 hash of the file.
288288
"""
289-
return _hash_file(path, hashlib.md5()).hexdigest()
289+
return _hash_file(path, hashlib.sha256()).hexdigest()
290290

291291

292292
def hash_files_or_dirs(paths: List[str]) -> str:
293-
"""Get the MD5 hash of the contents of a list of files or directories.
293+
"""Get the SHA256 hash of the contents of a list of files or directories.
294294
295295
Hash is changed if:
296296
* input list is changed
@@ -301,58 +301,58 @@ def hash_files_or_dirs(paths: List[str]) -> str:
301301
Args:
302302
paths: List of file or directory paths
303303
Returns:
304-
str: The MD5 hash of the list of files or directories.
304+
str: The SHA256 hash of the list of files or directories.
305305
"""
306-
md5 = hashlib.md5()
306+
sha256 = hashlib.sha256()
307307
for path in sorted(paths):
308-
md5 = _hash_file_or_dir(path, md5)
309-
return md5.hexdigest()
308+
sha256 = _hash_file_or_dir(path, sha256)
309+
return sha256.hexdigest()
310310

311311

312-
def _hash_file_or_dir(path: str, md5: Hash) -> Hash:
312+
def _hash_file_or_dir(path: str, sha256: Hash) -> Hash:
313313
"""Updates the inputted Hash with the contents of the current path.
314314
315315
Args:
316316
path: path of file or directory
317317
Returns:
318-
str: The MD5 hash of the file or directory
318+
str: The SHA256 hash of the file or directory
319319
"""
320320
if isinstance(path, str) and path.lower().startswith("file://"):
321321
path = unquote(urlparse(path).path)
322-
md5.update(path.encode())
322+
sha256.update(path.encode())
323323
if Path(path).is_dir():
324-
md5 = _hash_dir(path, md5)
324+
sha256 = _hash_dir(path, sha256)
325325
elif Path(path).is_file():
326-
md5 = _hash_file(path, md5)
327-
return md5
326+
sha256 = _hash_file(path, sha256)
327+
return sha256
328328

329329

330-
def _hash_dir(directory: Union[str, Path], md5: Hash) -> Hash:
330+
def _hash_dir(directory: Union[str, Path], sha256: Hash) -> Hash:
331331
"""Updates the inputted Hash with the contents of the current path.
332332
333333
Args:
334334
directory: path of the directory
335335
Returns:
336-
str: The MD5 hash of the directory
336+
str: The SHA256 hash of the directory
337337
"""
338338
if not Path(directory).is_dir():
339339
raise ValueError(str(directory) + " is not a valid directory")
340340
for path in sorted(Path(directory).iterdir()):
341-
md5.update(path.name.encode())
341+
sha256.update(path.name.encode())
342342
if path.is_file():
343-
md5 = _hash_file(path, md5)
343+
sha256 = _hash_file(path, sha256)
344344
elif path.is_dir():
345-
md5 = _hash_dir(path, md5)
346-
return md5
345+
sha256 = _hash_dir(path, sha256)
346+
return sha256
347347

348348

349-
def _hash_file(file: Union[str, Path], md5: Hash) -> Hash:
349+
def _hash_file(file: Union[str, Path], sha256: Hash) -> Hash:
350350
"""Updates the inputted Hash with the contents of the current path.
351351
352352
Args:
353353
file: path of the file
354354
Returns:
355-
str: The MD5 hash of the file
355+
str: The SHA256 hash of the file
356356
"""
357357
if isinstance(file, str) and file.lower().startswith("file://"):
358358
file = unquote(urlparse(file).path)
@@ -363,8 +363,8 @@ def _hash_file(file: Union[str, Path], md5: Hash) -> Hash:
363363
data = f.read(BUF_SIZE)
364364
if not data:
365365
break
366-
md5.update(data)
367-
return md5
366+
sha256.update(data)
367+
return sha256
368368

369369

370370
def validate_step_args_input(

tests/unit/sagemaker/workflow/test_steps.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -671,7 +671,7 @@ def test_processing_step_normalizes_args_with_local_code(mock_normalize_args, sc
671671
mock_normalize_args.return_value = [step.inputs, step.outputs]
672672
step.to_request()
673673
mock_normalize_args.assert_called_with(
674-
job_name="MyProcessingStep-3e89f0c7e101c356cbedf27d9d27e9db",
674+
job_name="MyProcessingStep-a22fc59b38f13da26f6a40b18687ba598cf669f74104b793cefd9c63eddf4ac7",
675675
arguments=step.job_arguments,
676676
inputs=step.inputs,
677677
outputs=step.outputs,

tests/unit/sagemaker/workflow/test_utilities.py

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -31,14 +31,14 @@ def test_hash_file():
3131
with tempfile.NamedTemporaryFile() as tmp:
3232
tmp.write("hashme".encode())
3333
hash = hash_file(tmp.name)
34-
assert hash == "d41d8cd98f00b204e9800998ecf8427e"
34+
assert hash == "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855"
3535

3636

3737
def test_hash_file_uri():
3838
with tempfile.NamedTemporaryFile() as tmp:
3939
tmp.write("hashme".encode())
4040
hash = hash_file(f"file:///{tmp.name}")
41-
assert hash == "d41d8cd98f00b204e9800998ecf8427e"
41+
assert hash == "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855"
4242

4343

4444
def test_hash_files_or_dirs_with_file():

tests/unit/sagemaker/workflow/test_utils.py

Lines changed: 2 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -82,7 +82,8 @@ def test_repack_model_step(estimator):
8282
assert hyperparameters["sagemaker_program"] == f'"{REPACK_SCRIPT_LAUNCHER}"'
8383
assert (
8484
hyperparameters["sagemaker_submit_directory"]
85-
== '"s3://my-bucket/MyRepackModelStep-b5ea77f701b47a8d075605497462ccc2/source/sourcedir.tar.gz"'
85+
== '"s3://my-bucket/MyRepackModelStep-717d7bdd388168c27e9ad2938ff0314e35be50b3157cf2498688c7525ea27e1e\
86+
/source/sourcedir.tar.gz"'
8687
)
8788

8889
del request_dict["Arguments"]["HyperParameters"]

0 commit comments

Comments (0)