@@ -1631,7 +1631,7 @@ def run(  # type: ignore[override]
                 :class:`~sagemaker.workflow.pipeline_context.PipelineSession`
         """
         s3_runproc_sh, inputs, job_name = self._pack_and_upload_code(
-            code, source_dir, dependencies, git_config, job_name, inputs
+            code, source_dir, dependencies, git_config, job_name, inputs, kms_key
         )
 
         # Submit a processing job.
@@ -1647,7 +1647,9 @@ def run(  # type: ignore[override]
             kms_key=kms_key,
         )
 
-    def _pack_and_upload_code(self, code, source_dir, dependencies, git_config, job_name, inputs):
+    def _pack_and_upload_code(
+        self, code, source_dir, dependencies, git_config, job_name, inputs, kms_key=None
+    ):
         """Pack local code bundle and upload to Amazon S3."""
         if code.startswith("s3://"):
             return code, inputs, job_name
@@ -1685,6 +1687,7 @@ def _pack_and_upload_code(self, code, source_dir, dependencies, git_config, job_
         s3_runproc_sh = S3Uploader.upload_string_as_file_body(
             self._generate_framework_script(script),
             desired_s3_uri=entrypoint_s3_uri,
+            kms_key=kms_key,
             sagemaker_session=self.sagemaker_session,
         )
         logger.info("runproc.sh uploaded to %s", s3_runproc_sh)
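
For context, a minimal sketch of how the patched path might be exercised, assuming a `FrameworkProcessor` built around the `SKLearn` estimator; the role ARN, script names, and KMS alias below are placeholders, not values from this commit:

```python
from sagemaker.processing import FrameworkProcessor
from sagemaker.sklearn.estimator import SKLearn

# Placeholder role and instance settings, for illustration only.
processor = FrameworkProcessor(
    estimator_cls=SKLearn,
    framework_version="1.0-1",
    role="arn:aws:iam::123456789012:role/MySageMakerRole",
    instance_count=1,
    instance_type="ml.m5.xlarge",
)

# With this change, the kms_key passed to run() is forwarded through
# _pack_and_upload_code() into S3Uploader.upload_string_as_file_body(),
# so the generated runproc.sh entrypoint is encrypted on upload as well,
# not only the processing job's own inputs and outputs.
processor.run(
    code="process.py",                  # placeholder entry script
    source_dir="src",                   # placeholder source directory
    kms_key="alias/my-processing-key",  # placeholder KMS key
)
```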