From d5eed80981f4c64ecb65afb930ab9381e6578f01 Mon Sep 17 00:00:00 2001 From: Pravali Uppugunduri Date: Thu, 19 Mar 2026 20:54:25 +0000 Subject: [PATCH 1/3] fix: Remove hardcoded secret key from Triton ONNX export path MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit The ONNX export path in _prepare_for_triton() set self.secret_key to a hardcoded value 'dummy secret key for onnx backend'. This key was then passed as SAGEMAKER_SERVE_SECRET_KEY into container environment variables and exposed in plaintext via DescribeModel/DescribeEndpointConfig APIs. The ONNX path does not use pickle serialization — models are exported to .onnx format and loaded natively by Triton's ONNX Runtime backend. There is no serve.pkl, no metadata.json, and no integrity check to perform. The secret key was dead code that also constituted a hardcoded credential (CWE-798). With this change, self.secret_key remains empty string (set by _build_for_triton), and the existing cleanup in _build_for_transformers removes empty SAGEMAKER_SERVE_SECRET_KEY from env_vars before CreateModel. Addresses: P400136088 (Bug 2 - Hardcoded secret key) --- sagemaker-serve/src/sagemaker/serve/model_builder_utils.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/sagemaker-serve/src/sagemaker/serve/model_builder_utils.py b/sagemaker-serve/src/sagemaker/serve/model_builder_utils.py index 8c1fd6db1b..87048682eb 100644 --- a/sagemaker-serve/src/sagemaker/serve/model_builder_utils.py +++ b/sagemaker-serve/src/sagemaker/serve/model_builder_utils.py @@ -3075,7 +3075,8 @@ def _prepare_for_triton(self): export_path.mkdir(parents=True) if self.model: - self.secret_key = "dummy secret key for onnx backend" + # ONNX path: no pickle serialization, no serve.pkl, no integrity check needed. + # Do not set secret_key — there is nothing to sign. 
if self.framework == Framework.PYTORCH: self._export_pytorch_to_onnx( From 644fc1622923f9120e172f42deaf14c9190aac79 Mon Sep 17 00:00:00 2001 From: Pravali Uppugunduri Date: Thu, 19 Mar 2026 21:24:13 +0000 Subject: [PATCH 2/3] fix: Add SHA-256 integrity verification for Triton inference handler MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Addresses P400136088 Bug 1 and V2146375387 (Triton path). Four changes: 1. check_integrity.py: Switch from HMAC-SHA256 to plain SHA-256. - Remove generate_secret_key() — no longer needed - compute_hash() now uses hashlib.sha256() instead of hmac.new() - perform_integrity_check() no longer reads SAGEMAKER_SERVE_SECRET_KEY from environment 2. triton/model.py: Add integrity check in initialize() BEFORE cloudpickle deserialization. Previously the handler called cloudpickle.load() with no verification (acknowledged by a TODO comment). Now reads the file into a buffer, runs perform_integrity_check(), then deserializes with cloudpickle.loads(). 3. triton/server.py: Remove SAGEMAKER_SERVE_SECRET_KEY from container environment variables in both local and SageMaker deployment modes. The key is no longer needed since integrity checking uses plain SHA-256. 4. model_builder_utils.py: Update _hmac_signing() to use plain SHA-256 and stop generating/storing a secret key. Remove generate_secret_key import. The integrity check still detects accidental corruption of model artifacts in S3. The HMAC was providing a false sense of security since the key was exposed via DescribeModel/DescribeEndpointConfig APIs. 
--- .../sagemaker/serve/model_builder_utils.py | 12 ++++------- .../serve/model_server/triton/model.py | 10 +++++++--- .../serve/model_server/triton/server.py | 2 -- .../serve/validations/check_integrity.py | 20 ++++++------------- .../unit/test_model_builder_utils_triton.py | 8 ++++---- 5 files changed, 21 insertions(+), 31 deletions(-) diff --git a/sagemaker-serve/src/sagemaker/serve/model_builder_utils.py b/sagemaker-serve/src/sagemaker/serve/model_builder_utils.py index 87048682eb..c4495a8ffb 100644 --- a/sagemaker-serve/src/sagemaker/serve/model_builder_utils.py +++ b/sagemaker-serve/src/sagemaker/serve/model_builder_utils.py @@ -131,7 +131,6 @@ def build(self): from sagemaker.serve.detector.pickler import save_pkl from sagemaker.serve.builder.requirements_manager import RequirementsManager from sagemaker.serve.validations.check_integrity import ( - generate_secret_key, compute_hash, ) from sagemaker.core.remote_function.core.serialization import _MetaData @@ -2884,20 +2883,17 @@ def _save_inference_spec(self) -> None: pkl_path = Path(self.model_path).joinpath("model_repository").joinpath("model") save_pkl(pkl_path, (self.inference_spec, self.schema_builder)) - def _hmac_signing(self): - """Perform HMAC signing on picke file for integrity check""" - secret_key = generate_secret_key() + def _compute_integrity_hash(self): + """Compute SHA-256 hash of serve.pkl and store in metadata.json for integrity check.""" pkl_path = Path(self.model_path).joinpath("model_repository").joinpath("model") with open(str(pkl_path.joinpath("serve.pkl")), "rb") as f: buffer = f.read() - hash_value = compute_hash(buffer=buffer, secret_key=secret_key) + hash_value = compute_hash(buffer=buffer) with open(str(pkl_path.joinpath("metadata.json")), "wb") as metadata: metadata.write(_MetaData(hash_value).to_json()) - self.secret_key = secret_key - def _generate_config_pbtxt(self, pkl_path: Path): """Generate Triton config.pbtxt file.""" config_path = pkl_path.joinpath("config.pbtxt") @@ 
-3100,7 +3096,7 @@ def _prepare_for_triton(self): self._pack_conda_env(pkl_path=pkl_path) - self._hmac_signing() + self._compute_integrity_hash() return diff --git a/sagemaker-serve/src/sagemaker/serve/model_server/triton/model.py b/sagemaker-serve/src/sagemaker/serve/model_server/triton/model.py index a1c731b0d6..7d49b0723d 100644 --- a/sagemaker-serve/src/sagemaker/serve/model_server/triton/model.py +++ b/sagemaker-serve/src/sagemaker/serve/model_server/triton/model.py @@ -26,10 +26,14 @@ def auto_complete_config(auto_complete_model_config): def initialize(self, args: dict) -> None: """Placeholder docstring""" serve_path = Path(TRITON_MODEL_DIR).joinpath("serve.pkl") - with open(str(serve_path), mode="rb") as f: - inference_spec, schema_builder = cloudpickle.load(f) + metadata_path = Path(TRITON_MODEL_DIR).joinpath("metadata.json") - # TODO: HMAC signing for integrity check + # Integrity check BEFORE deserialization to prevent RCE via malicious pickle + with open(str(serve_path), "rb") as f: + buffer = f.read() + perform_integrity_check(buffer=buffer, metadata_path=metadata_path) + + inference_spec, schema_builder = cloudpickle.loads(buffer) self.inference_spec = inference_spec self.schema_builder = schema_builder diff --git a/sagemaker-serve/src/sagemaker/serve/model_server/triton/server.py b/sagemaker-serve/src/sagemaker/serve/model_server/triton/server.py index 134f12dd42..b425f8a689 100644 --- a/sagemaker-serve/src/sagemaker/serve/model_server/triton/server.py +++ b/sagemaker-serve/src/sagemaker/serve/model_server/triton/server.py @@ -41,7 +41,6 @@ def _start_triton_server( env_vars.update( { "TRITON_MODEL_DIR": "/models/model", - "SAGEMAKER_SERVE_SECRET_KEY": secret_key, "LOCAL_PYTHON": platform.python_version(), } ) @@ -133,7 +132,6 @@ def _upload_triton_artifacts( env_vars = { "SAGEMAKER_TRITON_DEFAULT_MODEL_NAME": "model", "TRITON_MODEL_DIR": "/opt/ml/model/model", - "SAGEMAKER_SERVE_SECRET_KEY": secret_key, "LOCAL_PYTHON": platform.python_version(), } 
return s3_upload_path, env_vars diff --git a/sagemaker-serve/src/sagemaker/serve/validations/check_integrity.py b/sagemaker-serve/src/sagemaker/serve/validations/check_integrity.py index 4363d8d6ed..880ca5b602 100644 --- a/sagemaker-serve/src/sagemaker/serve/validations/check_integrity.py +++ b/sagemaker-serve/src/sagemaker/serve/validations/check_integrity.py @@ -1,29 +1,21 @@ -"""Validates the integrity of pickled file with HMAC signing.""" +"""Validates the integrity of pickled file with SHA-256 hash.""" from __future__ import absolute_import -import secrets import hmac import hashlib -import os from pathlib import Path from sagemaker.core.remote_function.core.serialization import _MetaData -def generate_secret_key(nbytes: int = 32) -> str: - """Generates secret key""" - return secrets.token_hex(nbytes) - - -def compute_hash(buffer: bytes, secret_key: str) -> str: - """Compute hash value using HMAC""" - return hmac.new(secret_key.encode(), msg=buffer, digestmod=hashlib.sha256).hexdigest() +def compute_hash(buffer: bytes) -> str: + """Compute SHA-256 hash of the given buffer.""" + return hashlib.sha256(buffer).hexdigest() def perform_integrity_check(buffer: bytes, metadata_path: Path): - """Validates the integrity of bytes by comparing the hash value""" - secret_key = os.environ.get("SAGEMAKER_SERVE_SECRET_KEY") - actual_hash_value = compute_hash(buffer=buffer, secret_key=secret_key) + """Validates the integrity of bytes by comparing the hash value.""" + actual_hash_value = compute_hash(buffer=buffer) if not Path.exists(metadata_path): raise ValueError("Path to metadata.json does not exist") diff --git a/sagemaker-serve/tests/unit/test_model_builder_utils_triton.py b/sagemaker-serve/tests/unit/test_model_builder_utils_triton.py index bb0d1d874c..3ac82016b6 100644 --- a/sagemaker-serve/tests/unit/test_model_builder_utils_triton.py +++ b/sagemaker-serve/tests/unit/test_model_builder_utils_triton.py @@ -113,7 +113,7 @@ def test_prepare_for_triton_tensorflow(self, 
mock_export, mock_copy): @patch('shutil.copy2') @patch.object(_ModelBuilderUtils, '_generate_config_pbtxt') @patch.object(_ModelBuilderUtils, '_pack_conda_env') - @patch.object(_ModelBuilderUtils, '_hmac_signing') + @patch.object(_ModelBuilderUtils, '_compute_integrity_hash') def test_prepare_for_triton_inference_spec(self, mock_hmac, mock_pack, mock_config, mock_copy): """Test preparing inference spec for Triton.""" utils = _ModelBuilderUtils() @@ -262,9 +262,9 @@ def test_save_inference_spec(self): class TestHMACSignin(unittest.TestCase): - """Test _hmac_signing method.""" + """Test _compute_integrity_hash method.""" - def test_hmac_signing(self): + def test_compute_integrity_hash(self): """Test HMAC signing.""" utils = _ModelBuilderUtils() @@ -276,7 +276,7 @@ def test_hmac_signing(self): # Create dummy serve.pkl (pkl_path / "serve.pkl").write_bytes(b"dummy content") - utils._hmac_signing() + utils._compute_integrity_hash() # Secret key is generated, not mocked self.assertIsNotNone(utils.secret_key) From 8ecfe9d9545bcaf8d08dd3d5a048a22be711d390 Mon Sep 17 00:00:00 2001 From: Pravali Uppugunduri Date: Thu, 19 Mar 2026 22:18:19 +0000 Subject: [PATCH 3/3] fix: Update all model server prepare.py to use plain SHA-256 Remove generate_secret_key import and usage from TorchServe, MMS, TF Serving, and SMD prepare functions. Switch compute_hash calls from HMAC-SHA256 to plain SHA-256 (no secret_key parameter). This is required because generate_secret_key was removed from check_integrity.py in the previous commit. Without this change, all model server imports fail with ImportError. 
--- .../multi_model_server/prepare.py | 6 ++-- .../model_server/multi_model_server/server.py | 2 -- .../serve/model_server/smd/prepare.py | 6 ++-- .../serve/model_server/smd/server.py | 1 - .../serve/model_server/tei/server.py | 2 -- .../tensorflow_serving/prepare.py | 6 ++-- .../model_server/tensorflow_serving/server.py | 2 -- .../serve/model_server/torchserve/prepare.py | 6 ++-- .../serve/model_server/torchserve/server.py | 2 -- .../test_multi_model_server_prepare.py | 12 ++----- .../test_multi_model_server_server.py | 8 ++--- .../unit/model_server/test_smd_prepare.py | 14 ++------- .../unit/model_server/test_smd_server.py | 5 +-- .../unit/model_server/test_tei_server.py | 6 ++-- .../test_tensorflow_serving_prepare.py | 16 +++------- .../test_tensorflow_serving_server.py | 9 ++---- .../model_server/test_torchserve_prepare.py | 20 +++--------- .../model_server/test_torchserve_server.py | 9 ++---- .../unit/test_model_builder_utils_triton.py | 5 ++- .../unit/validations/test_check_integrity.py | 31 ++++++++----------- 20 files changed, 50 insertions(+), 118 deletions(-) diff --git a/sagemaker-serve/src/sagemaker/serve/model_server/multi_model_server/prepare.py b/sagemaker-serve/src/sagemaker/serve/model_server/multi_model_server/prepare.py index 37ca745987..c6c78dbd57 100644 --- a/sagemaker-serve/src/sagemaker/serve/model_server/multi_model_server/prepare.py +++ b/sagemaker-serve/src/sagemaker/serve/model_server/multi_model_server/prepare.py @@ -26,7 +26,6 @@ from sagemaker.serve.spec.inference_spec import InferenceSpec from sagemaker.serve.detector.dependency_manager import capture_dependencies from sagemaker.serve.validations.check_integrity import ( - generate_secret_key, compute_hash, ) from sagemaker.core.remote_function.core.serialization import _MetaData @@ -119,11 +118,10 @@ def prepare_for_mms( capture_dependencies(dependencies=dependencies, work_dir=code_dir) - secret_key = generate_secret_key() with open(str(code_dir.joinpath("serve.pkl")), "rb") as f: 
buffer = f.read() - hash_value = compute_hash(buffer=buffer, secret_key=secret_key) + hash_value = compute_hash(buffer=buffer) with open(str(code_dir.joinpath("metadata.json")), "wb") as metadata: metadata.write(_MetaData(hash_value).to_json()) - return secret_key + return "" diff --git a/sagemaker-serve/src/sagemaker/serve/model_server/multi_model_server/server.py b/sagemaker-serve/src/sagemaker/serve/model_server/multi_model_server/server.py index 9401dd74d9..a5d735af30 100644 --- a/sagemaker-serve/src/sagemaker/serve/model_server/multi_model_server/server.py +++ b/sagemaker-serve/src/sagemaker/serve/model_server/multi_model_server/server.py @@ -35,7 +35,6 @@ def _start_serving( env = { "SAGEMAKER_SUBMIT_DIRECTORY": "/opt/ml/model/code", "SAGEMAKER_PROGRAM": "inference.py", - "SAGEMAKER_SERVE_SECRET_KEY": secret_key, "LOCAL_PYTHON": platform.python_version(), } if env_vars: @@ -131,7 +130,6 @@ def _upload_server_artifacts( env_vars = { "SAGEMAKER_SUBMIT_DIRECTORY": "/opt/ml/model/code", "SAGEMAKER_PROGRAM": "inference.py", - "SAGEMAKER_SERVE_SECRET_KEY": secret_key, "SAGEMAKER_REGION": sagemaker_session.boto_region_name, "SAGEMAKER_CONTAINER_LOG_LEVEL": "10", "LOCAL_PYTHON": platform.python_version(), diff --git a/sagemaker-serve/src/sagemaker/serve/model_server/smd/prepare.py b/sagemaker-serve/src/sagemaker/serve/model_server/smd/prepare.py index b66de32bf7..7b248312f0 100644 --- a/sagemaker-serve/src/sagemaker/serve/model_server/smd/prepare.py +++ b/sagemaker-serve/src/sagemaker/serve/model_server/smd/prepare.py @@ -12,7 +12,6 @@ from sagemaker.serve.spec.inference_spec import InferenceSpec from sagemaker.serve.detector.dependency_manager import capture_dependencies from sagemaker.serve.validations.check_integrity import ( - generate_secret_key, compute_hash, ) from sagemaker.core.remote_function.core.serialization import _MetaData @@ -64,11 +63,10 @@ def prepare_for_smd( capture_dependencies(dependencies=dependencies, work_dir=code_dir) - secret_key = 
generate_secret_key() with open(str(code_dir.joinpath("serve.pkl")), "rb") as f: buffer = f.read() - hash_value = compute_hash(buffer=buffer, secret_key=secret_key) + hash_value = compute_hash(buffer=buffer) with open(str(code_dir.joinpath("metadata.json")), "wb") as metadata: metadata.write(_MetaData(hash_value).to_json()) - return secret_key + return "" diff --git a/sagemaker-serve/src/sagemaker/serve/model_server/smd/server.py b/sagemaker-serve/src/sagemaker/serve/model_server/smd/server.py index e40dc3aa61..ecb68406c1 100644 --- a/sagemaker-serve/src/sagemaker/serve/model_server/smd/server.py +++ b/sagemaker-serve/src/sagemaker/serve/model_server/smd/server.py @@ -53,7 +53,6 @@ def _upload_smd_artifacts( "SAGEMAKER_INFERENCE_CODE_DIRECTORY": "/opt/ml/model/code", "SAGEMAKER_INFERENCE_CODE": "inference.handler", "SAGEMAKER_REGION": sagemaker_session.boto_region_name, - "SAGEMAKER_SERVE_SECRET_KEY": secret_key, "LOCAL_PYTHON": platform.python_version(), } return s3_upload_path, env_vars diff --git a/sagemaker-serve/src/sagemaker/serve/model_server/tei/server.py b/sagemaker-serve/src/sagemaker/serve/model_server/tei/server.py index 9f2f4b71b3..c23c52a513 100644 --- a/sagemaker-serve/src/sagemaker/serve/model_server/tei/server.py +++ b/sagemaker-serve/src/sagemaker/serve/model_server/tei/server.py @@ -38,8 +38,6 @@ def _start_tei_serving( secret_key: Secret key to use for authentication env_vars: Environment variables to set """ - if env_vars and secret_key: - env_vars["SAGEMAKER_SERVE_SECRET_KEY"] = secret_key self.container = client.containers.run( image, diff --git a/sagemaker-serve/src/sagemaker/serve/model_server/tensorflow_serving/prepare.py b/sagemaker-serve/src/sagemaker/serve/model_server/tensorflow_serving/prepare.py index 3525cc9b4a..2bd091a8b6 100644 --- a/sagemaker-serve/src/sagemaker/serve/model_server/tensorflow_serving/prepare.py +++ b/sagemaker-serve/src/sagemaker/serve/model_server/tensorflow_serving/prepare.py @@ -11,7 +11,6 @@ ) from 
sagemaker.serve.detector.dependency_manager import capture_dependencies from sagemaker.serve.validations.check_integrity import ( - generate_secret_key, compute_hash, ) from sagemaker.core.remote_function.core.serialization import _MetaData @@ -57,11 +56,10 @@ def prepare_for_tf_serving( raise ValueError("SavedModel is not found for Tensorflow or Keras flavor.") _move_contents(src_dir=mlflow_saved_model_dir, dest_dir=saved_model_bundle_dir) - secret_key = generate_secret_key() with open(str(code_dir.joinpath("serve.pkl")), "rb") as f: buffer = f.read() - hash_value = compute_hash(buffer=buffer, secret_key=secret_key) + hash_value = compute_hash(buffer=buffer) with open(str(code_dir.joinpath("metadata.json")), "wb") as metadata: metadata.write(_MetaData(hash_value).to_json()) - return secret_key + return "" diff --git a/sagemaker-serve/src/sagemaker/serve/model_server/tensorflow_serving/server.py b/sagemaker-serve/src/sagemaker/serve/model_server/tensorflow_serving/server.py index 2f4a959528..f2ac943d96 100644 --- a/sagemaker-serve/src/sagemaker/serve/model_server/tensorflow_serving/server.py +++ b/sagemaker-serve/src/sagemaker/serve/model_server/tensorflow_serving/server.py @@ -47,7 +47,6 @@ def _start_tensorflow_serving( environment={ "SAGEMAKER_SUBMIT_DIRECTORY": "/opt/ml/model/code", "SAGEMAKER_PROGRAM": "inference.py", - "SAGEMAKER_SERVE_SECRET_KEY": secret_key, "LOCAL_PYTHON": platform.python_version(), **env_vars, }, @@ -124,7 +123,6 @@ def _upload_tensorflow_serving_artifacts( "SAGEMAKER_PROGRAM": "inference.py", "SAGEMAKER_REGION": sagemaker_session.boto_region_name, "SAGEMAKER_CONTAINER_LOG_LEVEL": "10", - "SAGEMAKER_SERVE_SECRET_KEY": secret_key, "LOCAL_PYTHON": platform.python_version(), } return s3_upload_path, env_vars diff --git a/sagemaker-serve/src/sagemaker/serve/model_server/torchserve/prepare.py b/sagemaker-serve/src/sagemaker/serve/model_server/torchserve/prepare.py index 988acf646d..6ff888ac0d 100644 --- 
a/sagemaker-serve/src/sagemaker/serve/model_server/torchserve/prepare.py +++ b/sagemaker-serve/src/sagemaker/serve/model_server/torchserve/prepare.py @@ -13,7 +13,6 @@ from sagemaker.serve.spec.inference_spec import InferenceSpec from sagemaker.serve.detector.dependency_manager import capture_dependencies from sagemaker.serve.validations.check_integrity import ( - generate_secret_key, compute_hash, ) from sagemaker.serve.validations.check_image_uri import is_1p_image_uri @@ -67,11 +66,10 @@ def prepare_for_torchserve( capture_dependencies(dependencies=dependencies, work_dir=code_dir) - secret_key = generate_secret_key() with open(str(code_dir.joinpath("serve.pkl")), "rb") as f: buffer = f.read() - hash_value = compute_hash(buffer=buffer, secret_key=secret_key) + hash_value = compute_hash(buffer=buffer) with open(str(code_dir.joinpath("metadata.json")), "wb") as metadata: metadata.write(_MetaData(hash_value).to_json()) - return secret_key \ No newline at end of file + return "" \ No newline at end of file diff --git a/sagemaker-serve/src/sagemaker/serve/model_server/torchserve/server.py b/sagemaker-serve/src/sagemaker/serve/model_server/torchserve/server.py index 0d237df987..b14187d641 100644 --- a/sagemaker-serve/src/sagemaker/serve/model_server/torchserve/server.py +++ b/sagemaker-serve/src/sagemaker/serve/model_server/torchserve/server.py @@ -39,7 +39,6 @@ def _start_torch_serve( environment={ "SAGEMAKER_SUBMIT_DIRECTORY": "/opt/ml/model/code", "SAGEMAKER_PROGRAM": "inference.py", - "SAGEMAKER_SERVE_SECRET_KEY": secret_key, "LOCAL_PYTHON": platform.python_version(), **env_vars, }, @@ -103,7 +102,6 @@ def _upload_torchserve_artifacts( "SAGEMAKER_PROGRAM": "inference.py", "SAGEMAKER_REGION": sagemaker_session.boto_region_name, "SAGEMAKER_CONTAINER_LOG_LEVEL": "10", - "SAGEMAKER_SERVE_SECRET_KEY": secret_key, "LOCAL_PYTHON": platform.python_version(), } return s3_upload_path, env_vars diff --git 
a/sagemaker-serve/tests/unit/model_server/test_multi_model_server_prepare.py b/sagemaker-serve/tests/unit/model_server/test_multi_model_server_prepare.py index d6a571cd1a..f35f32f675 100644 --- a/sagemaker-serve/tests/unit/model_server/test_multi_model_server_prepare.py +++ b/sagemaker-serve/tests/unit/model_server/test_multi_model_server_prepare.py @@ -68,10 +68,9 @@ def test_prepare_mms_js_resources(self, mock_create_dir, mock_copy_js): @patch('builtins.input', return_value='') @patch('sagemaker.serve.model_server.multi_model_server.prepare.compute_hash') - @patch('sagemaker.serve.model_server.multi_model_server.prepare.generate_secret_key') @patch('sagemaker.serve.model_server.multi_model_server.prepare.capture_dependencies') @patch('shutil.copy2') - def test_prepare_for_mms_creates_structure(self, mock_copy, mock_capture, mock_gen_key, mock_hash, mock_input): + def test_prepare_for_mms_creates_structure(self, mock_copy, mock_capture, mock_hash, mock_input): """Test prepare_for_mms creates directory structure and files.""" from sagemaker.serve.model_server.multi_model_server.prepare import prepare_for_mms @@ -83,7 +82,6 @@ def test_prepare_for_mms_creates_structure(self, mock_copy, mock_capture, mock_g serve_pkl = code_dir / "serve.pkl" serve_pkl.write_bytes(b"test data") - mock_gen_key.return_value = "test-secret-key" mock_hash.return_value = "test-hash" mock_session = Mock() mock_inference_spec = Mock() @@ -98,16 +96,14 @@ def test_prepare_for_mms_creates_structure(self, mock_copy, mock_capture, mock_g inference_spec=mock_inference_spec ) - self.assertEqual(secret_key, "test-secret-key") mock_inference_spec.prepare.assert_called_once_with(str(model_path)) mock_capture.assert_called_once() @patch('builtins.input', return_value='') @patch('sagemaker.serve.model_server.multi_model_server.prepare.compute_hash') - @patch('sagemaker.serve.model_server.multi_model_server.prepare.generate_secret_key') 
@patch('sagemaker.serve.model_server.multi_model_server.prepare.capture_dependencies') @patch('shutil.copy2') - def test_prepare_for_mms_raises_on_invalid_dir(self, mock_copy, mock_capture, mock_gen_key, mock_hash, mock_input): + def test_prepare_for_mms_raises_on_invalid_dir(self, mock_copy, mock_capture, mock_hash, mock_input): """Test prepare_for_mms raises exception for invalid directory.""" from sagemaker.serve.model_server.multi_model_server.prepare import prepare_for_mms @@ -128,10 +124,9 @@ def test_prepare_for_mms_raises_on_invalid_dir(self, mock_copy, mock_capture, mo @patch('builtins.input', return_value='') @patch('sagemaker.serve.model_server.multi_model_server.prepare.compute_hash') - @patch('sagemaker.serve.model_server.multi_model_server.prepare.generate_secret_key') @patch('sagemaker.serve.model_server.multi_model_server.prepare.capture_dependencies') @patch('shutil.copy2') - def test_prepare_for_mms_copies_shared_libs(self, mock_copy, mock_capture, mock_gen_key, mock_hash, mock_input): + def test_prepare_for_mms_copies_shared_libs(self, mock_copy, mock_capture, mock_hash, mock_input): """Test prepare_for_mms copies shared libraries.""" from sagemaker.serve.model_server.multi_model_server.prepare import prepare_for_mms @@ -145,7 +140,6 @@ def test_prepare_for_mms_copies_shared_libs(self, mock_copy, mock_capture, mock_ shared_lib = Path(self.temp_dir) / "lib.so" shared_lib.touch() - mock_gen_key.return_value = "test-key" mock_hash.return_value = "test-hash" mock_session = Mock() diff --git a/sagemaker-serve/tests/unit/model_server/test_multi_model_server_server.py b/sagemaker-serve/tests/unit/model_server/test_multi_model_server_server.py index 02ae4dc596..6a096e41d4 100644 --- a/sagemaker-serve/tests/unit/model_server/test_multi_model_server_server.py +++ b/sagemaker-serve/tests/unit/model_server/test_multi_model_server_server.py @@ -32,8 +32,7 @@ def test_start_serving_creates_container(self, mock_path): self.assertEqual(server.container, 
mock_container) mock_client.containers.run.assert_called_once() call_kwargs = mock_client.containers.run.call_args[1] - self.assertIn("SAGEMAKER_SERVE_SECRET_KEY", call_kwargs["environment"]) - self.assertEqual(call_kwargs["environment"]["SAGEMAKER_SERVE_SECRET_KEY"], "test-secret") + self.assertIn("SAGEMAKER_SUBMIT_DIRECTORY", call_kwargs["environment"]) @patch('sagemaker.serve.model_server.multi_model_server.server.Path') def test_start_serving_with_no_env_vars(self, mock_path): @@ -166,8 +165,7 @@ def test_upload_server_artifacts_uploads_to_s3(self, mock_path, mock_is_s3, mock ) self.assertIsNotNone(model_data) - self.assertIn("SAGEMAKER_SERVE_SECRET_KEY", env_vars) - self.assertEqual(env_vars["SAGEMAKER_SERVE_SECRET_KEY"], "test-key") + self.assertIn("SAGEMAKER_SUBMIT_DIRECTORY", env_vars) @patch('sagemaker.serve.model_server.multi_model_server.server._is_s3_uri') def test_upload_server_artifacts_no_upload(self, mock_is_s3): @@ -187,7 +185,7 @@ def test_upload_server_artifacts_no_upload(self, mock_is_s3): ) self.assertIsNone(model_data) - self.assertIn("SAGEMAKER_SERVE_SECRET_KEY", env_vars) + self.assertIn("SAGEMAKER_SUBMIT_DIRECTORY", env_vars) class TestUpdateEnvVars(unittest.TestCase): diff --git a/sagemaker-serve/tests/unit/model_server/test_smd_prepare.py b/sagemaker-serve/tests/unit/model_server/test_smd_prepare.py index 4d5a0a7de8..bc84889f1e 100644 --- a/sagemaker-serve/tests/unit/model_server/test_smd_prepare.py +++ b/sagemaker-serve/tests/unit/model_server/test_smd_prepare.py @@ -18,10 +18,9 @@ def tearDown(self): shutil.rmtree(self.temp_dir) @patch('sagemaker.serve.model_server.smd.prepare.compute_hash') - @patch('sagemaker.serve.model_server.smd.prepare.generate_secret_key') @patch('sagemaker.serve.model_server.smd.prepare.capture_dependencies') @patch('shutil.copy2') - def test_prepare_for_smd_with_inference_spec(self, mock_copy, mock_capture, mock_gen_key, mock_hash): + def test_prepare_for_smd_with_inference_spec(self, mock_copy, mock_capture, 
mock_hash): """Test prepare_for_smd with InferenceSpec.""" from sagemaker.serve.model_server.smd.prepare import prepare_for_smd from sagemaker.serve.spec.inference_spec import InferenceSpec @@ -33,7 +32,6 @@ def test_prepare_for_smd_with_inference_spec(self, mock_copy, mock_capture, mock serve_pkl = code_dir / "serve.pkl" serve_pkl.write_bytes(b"test data") - mock_gen_key.return_value = "test-secret-key" mock_hash.return_value = "test-hash" mock_inference_spec = Mock(spec=InferenceSpec) @@ -45,15 +43,13 @@ def test_prepare_for_smd_with_inference_spec(self, mock_copy, mock_capture, mock inference_spec=mock_inference_spec ) - self.assertEqual(secret_key, "test-secret-key") mock_inference_spec.prepare.assert_called_once_with(str(model_path)) @patch('os.rename') @patch('sagemaker.serve.model_server.smd.prepare.compute_hash') - @patch('sagemaker.serve.model_server.smd.prepare.generate_secret_key') @patch('sagemaker.serve.model_server.smd.prepare.capture_dependencies') @patch('shutil.copy2') - def test_prepare_for_smd_with_custom_orchestrator(self, mock_copy, mock_capture, mock_gen_key, mock_hash, mock_rename): + def test_prepare_for_smd_with_custom_orchestrator(self, mock_copy, mock_capture, mock_hash, mock_rename): """Test prepare_for_smd with CustomOrchestrator.""" from sagemaker.serve.model_server.smd.prepare import prepare_for_smd from sagemaker.serve.spec.inference_base import CustomOrchestrator @@ -65,7 +61,6 @@ def test_prepare_for_smd_with_custom_orchestrator(self, mock_copy, mock_capture, serve_pkl = code_dir / "serve.pkl" serve_pkl.write_bytes(b"test data") - mock_gen_key.return_value = "test-secret-key" mock_hash.return_value = "test-hash" mock_orchestrator = Mock(spec=CustomOrchestrator) @@ -77,15 +72,13 @@ def test_prepare_for_smd_with_custom_orchestrator(self, mock_copy, mock_capture, inference_spec=mock_orchestrator ) - self.assertEqual(secret_key, "test-secret-key") # Verify custom_execution_inference.py was copied and renamed 
mock_rename.assert_called_once() @patch('sagemaker.serve.model_server.smd.prepare.compute_hash') - @patch('sagemaker.serve.model_server.smd.prepare.generate_secret_key') @patch('sagemaker.serve.model_server.smd.prepare.capture_dependencies') @patch('shutil.copy2') - def test_prepare_for_smd_with_shared_libs(self, mock_copy, mock_capture, mock_gen_key, mock_hash): + def test_prepare_for_smd_with_shared_libs(self, mock_copy, mock_capture, mock_hash): """Test prepare_for_smd copies shared libraries.""" from sagemaker.serve.model_server.smd.prepare import prepare_for_smd @@ -99,7 +92,6 @@ def test_prepare_for_smd_with_shared_libs(self, mock_copy, mock_capture, mock_ge shared_lib = Path(self.temp_dir) / "lib.so" shared_lib.touch() - mock_gen_key.return_value = "test-key" mock_hash.return_value = "test-hash" with patch('builtins.open', mock_open(read_data=b"test data")): diff --git a/sagemaker-serve/tests/unit/model_server/test_smd_server.py b/sagemaker-serve/tests/unit/model_server/test_smd_server.py index 8bf7d4424e..3d78a6c3ce 100644 --- a/sagemaker-serve/tests/unit/model_server/test_smd_server.py +++ b/sagemaker-serve/tests/unit/model_server/test_smd_server.py @@ -25,8 +25,6 @@ def test_upload_smd_artifacts_with_s3_path(self, mock_is_s3): ) self.assertEqual(s3_path, "s3://bucket/model") - self.assertIn("SAGEMAKER_SERVE_SECRET_KEY", env_vars) - self.assertEqual(env_vars["SAGEMAKER_SERVE_SECRET_KEY"], "test-key") self.assertIn("SAGEMAKER_INFERENCE_CODE_DIRECTORY", env_vars) @patch('sagemaker.serve.model_server.smd.server.upload') @@ -58,7 +56,6 @@ def test_upload_smd_artifacts_uploads_to_s3(self, mock_is_s3, mock_fw_utils, ) self.assertEqual(s3_path, "s3://bucket/code_prefix/model.tar.gz") - self.assertIn("SAGEMAKER_SERVE_SECRET_KEY", env_vars) self.assertIn("SAGEMAKER_INFERENCE_CODE", env_vars) mock_upload.assert_called_once() @@ -80,7 +77,7 @@ def test_upload_smd_artifacts_no_upload(self, mock_is_s3): ) self.assertIsNone(s3_path) - 
self.assertIn("SAGEMAKER_SERVE_SECRET_KEY", env_vars) + self.assertIn("SAGEMAKER_INFERENCE_CODE_DIRECTORY", env_vars) if __name__ == "__main__": diff --git a/sagemaker-serve/tests/unit/model_server/test_tei_server.py b/sagemaker-serve/tests/unit/model_server/test_tei_server.py index c280e4b546..c6e88e219f 100644 --- a/sagemaker-serve/tests/unit/model_server/test_tei_server.py +++ b/sagemaker-serve/tests/unit/model_server/test_tei_server.py @@ -40,7 +40,7 @@ def test_start_tei_serving(self, mock_device_req, mock_path, mock_update_env): @patch('sagemaker.serve.model_server.tei.server.Path') @patch('sagemaker.serve.model_server.tei.server.DeviceRequest') def test_start_tei_serving_adds_secret_key(self, mock_device_req, mock_path, mock_update_env): - """Test _start_tei_serving adds secret key to env vars.""" + """Test _start_tei_serving no longer adds secret key to env vars.""" from sagemaker.serve.model_server.tei.server import LocalTeiServing server = LocalTeiServing() @@ -62,8 +62,8 @@ def test_start_tei_serving_adds_secret_key(self, mock_device_req, mock_path, moc env_vars=env_vars ) - # Verify secret key was added to env_vars - self.assertEqual(env_vars["SAGEMAKER_SERVE_SECRET_KEY"], "test-secret") + # Verify secret key is NOT added to env_vars + self.assertNotIn("SAGEMAKER_SERVE_SECRET_KEY", env_vars) @patch('sagemaker.serve.model_server.tei.server.requests.post') @patch('sagemaker.serve.model_server.tei.server.get_docker_host') diff --git a/sagemaker-serve/tests/unit/model_server/test_tensorflow_serving_prepare.py b/sagemaker-serve/tests/unit/model_server/test_tensorflow_serving_prepare.py index e6ca1161dc..cf1baff149 100644 --- a/sagemaker-serve/tests/unit/model_server/test_tensorflow_serving_prepare.py +++ b/sagemaker-serve/tests/unit/model_server/test_tensorflow_serving_prepare.py @@ -20,10 +20,9 @@ def tearDown(self): @patch('sagemaker.serve.model_server.tensorflow_serving.prepare._move_contents') 
@patch('sagemaker.serve.model_server.tensorflow_serving.prepare._get_saved_model_path_for_tensorflow_and_keras_flavor') @patch('sagemaker.serve.model_server.tensorflow_serving.prepare.compute_hash') - @patch('sagemaker.serve.model_server.tensorflow_serving.prepare.generate_secret_key') @patch('sagemaker.serve.model_server.tensorflow_serving.prepare.capture_dependencies') @patch('shutil.copy2') - def test_prepare_for_tf_serving_success(self, mock_copy, mock_capture, mock_gen_key, + def test_prepare_for_tf_serving_success(self, mock_copy, mock_capture, mock_hash, mock_get_saved, mock_move): """Test prepare_for_tf_serving creates structure successfully.""" from sagemaker.serve.model_server.tensorflow_serving.prepare import prepare_for_tf_serving @@ -35,7 +34,6 @@ def test_prepare_for_tf_serving_success(self, mock_copy, mock_capture, mock_gen_ serve_pkl = code_dir / "serve.pkl" serve_pkl.write_bytes(b"test data") - mock_gen_key.return_value = "test-secret-key" mock_hash.return_value = "test-hash" mock_get_saved.return_value = Path(self.temp_dir) / "saved_model" @@ -46,16 +44,14 @@ def test_prepare_for_tf_serving_success(self, mock_copy, mock_capture, mock_gen_ dependencies={} ) - self.assertEqual(secret_key, "test-secret-key") mock_capture.assert_called_once() mock_move.assert_called_once() @patch('sagemaker.serve.model_server.tensorflow_serving.prepare._get_saved_model_path_for_tensorflow_and_keras_flavor') @patch('sagemaker.serve.model_server.tensorflow_serving.prepare.compute_hash') - @patch('sagemaker.serve.model_server.tensorflow_serving.prepare.generate_secret_key') @patch('sagemaker.serve.model_server.tensorflow_serving.prepare.capture_dependencies') @patch('shutil.copy2') - def test_prepare_for_tf_serving_no_saved_model(self, mock_copy, mock_capture, mock_gen_key, + def test_prepare_for_tf_serving_no_saved_model(self, mock_copy, mock_capture, mock_hash, mock_get_saved): """Test prepare_for_tf_serving raises error when SavedModel not found.""" from 
sagemaker.serve.model_server.tensorflow_serving.prepare import prepare_for_tf_serving @@ -67,7 +63,6 @@ def test_prepare_for_tf_serving_no_saved_model(self, mock_copy, mock_capture, mo serve_pkl = code_dir / "serve.pkl" serve_pkl.write_bytes(b"test data") - mock_gen_key.return_value = "test-secret-key" mock_hash.return_value = "test-hash" mock_get_saved.return_value = None @@ -82,10 +77,9 @@ def test_prepare_for_tf_serving_no_saved_model(self, mock_copy, mock_capture, mo @patch('sagemaker.serve.model_server.tensorflow_serving.prepare._move_contents') @patch('sagemaker.serve.model_server.tensorflow_serving.prepare._get_saved_model_path_for_tensorflow_and_keras_flavor') @patch('sagemaker.serve.model_server.tensorflow_serving.prepare.compute_hash') - @patch('sagemaker.serve.model_server.tensorflow_serving.prepare.generate_secret_key') @patch('sagemaker.serve.model_server.tensorflow_serving.prepare.capture_dependencies') @patch('shutil.copy2') - def test_prepare_for_tf_serving_with_shared_libs(self, mock_copy, mock_capture, mock_gen_key, + def test_prepare_for_tf_serving_with_shared_libs(self, mock_copy, mock_capture, mock_hash, mock_get_saved, mock_move): """Test prepare_for_tf_serving copies shared libraries.""" from sagemaker.serve.model_server.tensorflow_serving.prepare import prepare_for_tf_serving @@ -100,7 +94,6 @@ def test_prepare_for_tf_serving_with_shared_libs(self, mock_copy, mock_capture, shared_lib = Path(self.temp_dir) / "lib.so" shared_lib.touch() - mock_gen_key.return_value = "test-key" mock_hash.return_value = "test-hash" mock_get_saved.return_value = Path(self.temp_dir) / "saved_model" @@ -116,10 +109,9 @@ def test_prepare_for_tf_serving_with_shared_libs(self, mock_copy, mock_capture, @patch('sagemaker.serve.model_server.tensorflow_serving.prepare._get_saved_model_path_for_tensorflow_and_keras_flavor') @patch('sagemaker.serve.model_server.tensorflow_serving.prepare.compute_hash') - 
@patch('sagemaker.serve.model_server.tensorflow_serving.prepare.generate_secret_key') @patch('sagemaker.serve.model_server.tensorflow_serving.prepare.capture_dependencies') @patch('shutil.copy2') - def test_prepare_for_tf_serving_invalid_dir(self, mock_copy, mock_capture, mock_gen_key, + def test_prepare_for_tf_serving_invalid_dir(self, mock_copy, mock_capture, mock_hash, mock_get_saved): """Test prepare_for_tf_serving raises exception for invalid directory.""" from sagemaker.serve.model_server.tensorflow_serving.prepare import prepare_for_tf_serving diff --git a/sagemaker-serve/tests/unit/model_server/test_tensorflow_serving_server.py b/sagemaker-serve/tests/unit/model_server/test_tensorflow_serving_server.py index d0bac2e5dc..3a5af20bf9 100644 --- a/sagemaker-serve/tests/unit/model_server/test_tensorflow_serving_server.py +++ b/sagemaker-serve/tests/unit/model_server/test_tensorflow_serving_server.py @@ -32,8 +32,6 @@ def test_start_tensorflow_serving(self, mock_path): self.assertEqual(server.container, mock_container) mock_client.containers.run.assert_called_once() call_kwargs = mock_client.containers.run.call_args[1] - self.assertIn("SAGEMAKER_SERVE_SECRET_KEY", call_kwargs["environment"]) - self.assertEqual(call_kwargs["environment"]["SAGEMAKER_SERVE_SECRET_KEY"], "test-secret") self.assertEqual(call_kwargs["environment"]["CUSTOM_VAR"], "value") @patch('sagemaker.serve.model_server.tensorflow_serving.server.requests.post') @@ -97,8 +95,7 @@ def test_upload_tensorflow_serving_artifacts_with_s3_path(self, mock_is_s3): ) self.assertEqual(s3_path, "s3://bucket/model") - self.assertIn("SAGEMAKER_SERVE_SECRET_KEY", env_vars) - self.assertEqual(env_vars["SAGEMAKER_SERVE_SECRET_KEY"], "test-key") + self.assertIn("SAGEMAKER_SUBMIT_DIRECTORY", env_vars) @patch('sagemaker.serve.model_server.tensorflow_serving.server.upload') @patch('sagemaker.serve.model_server.tensorflow_serving.server.determine_bucket_and_prefix') @@ -129,7 +126,7 @@ def 
test_upload_tensorflow_serving_artifacts_uploads_to_s3(self, mock_is_s3, moc ) self.assertEqual(s3_path, "s3://bucket/code_prefix/model.tar.gz") - self.assertIn("SAGEMAKER_SERVE_SECRET_KEY", env_vars) + self.assertIn("SAGEMAKER_SUBMIT_DIRECTORY", env_vars) mock_upload.assert_called_once() @patch('sagemaker.serve.model_server.tensorflow_serving.server._is_s3_uri') @@ -150,7 +147,7 @@ def test_upload_tensorflow_serving_artifacts_no_upload(self, mock_is_s3): ) self.assertIsNone(s3_path) - self.assertIn("SAGEMAKER_SERVE_SECRET_KEY", env_vars) + self.assertIn("SAGEMAKER_SUBMIT_DIRECTORY", env_vars) if __name__ == "__main__": diff --git a/sagemaker-serve/tests/unit/model_server/test_torchserve_prepare.py b/sagemaker-serve/tests/unit/model_server/test_torchserve_prepare.py index 1ae35eca6a..3953776625 100644 --- a/sagemaker-serve/tests/unit/model_server/test_torchserve_prepare.py +++ b/sagemaker-serve/tests/unit/model_server/test_torchserve_prepare.py @@ -18,12 +18,11 @@ def tearDown(self): shutil.rmtree(self.temp_dir) @patch('sagemaker.serve.model_server.torchserve.prepare.compute_hash') - @patch('sagemaker.serve.model_server.torchserve.prepare.generate_secret_key') @patch('sagemaker.serve.model_server.torchserve.prepare.capture_dependencies') @patch('sagemaker.serve.model_server.torchserve.prepare.is_1p_image_uri') @patch('shutil.copy2') def test_prepare_for_torchserve_standard_image(self, mock_copy, mock_is_1p, mock_capture, - mock_gen_key, mock_hash): + mock_hash): """Test prepare_for_torchserve with standard image.""" from sagemaker.serve.model_server.torchserve.prepare import prepare_for_torchserve @@ -35,7 +34,6 @@ def test_prepare_for_torchserve_standard_image(self, mock_copy, mock_is_1p, mock serve_pkl.write_bytes(b"test data") mock_is_1p.return_value = True - mock_gen_key.return_value = "test-secret-key" mock_hash.return_value = "test-hash" mock_session = Mock() mock_inference_spec = Mock() @@ -50,18 +48,16 @@ def test_prepare_for_torchserve_standard_image(self, 
mock_copy, mock_is_1p, mock inference_spec=mock_inference_spec ) - self.assertEqual(secret_key, "test-secret-key") mock_inference_spec.prepare.assert_called_once_with(str(model_path)) mock_capture.assert_called_once() @patch('os.rename') @patch('sagemaker.serve.model_server.torchserve.prepare.compute_hash') - @patch('sagemaker.serve.model_server.torchserve.prepare.generate_secret_key') @patch('sagemaker.serve.model_server.torchserve.prepare.capture_dependencies') @patch('sagemaker.serve.model_server.torchserve.prepare.is_1p_image_uri') @patch('shutil.copy2') def test_prepare_for_torchserve_xgboost_image(self, mock_copy, mock_is_1p, mock_capture, - mock_gen_key, mock_hash, mock_rename): + mock_hash, mock_rename): """Test prepare_for_torchserve with xgboost image.""" from sagemaker.serve.model_server.torchserve.prepare import prepare_for_torchserve @@ -73,7 +69,6 @@ def test_prepare_for_torchserve_xgboost_image(self, mock_copy, mock_is_1p, mock_ serve_pkl.write_bytes(b"test data") mock_is_1p.return_value = True - mock_gen_key.return_value = "test-secret-key" mock_hash.return_value = "test-hash" mock_session = Mock() @@ -87,17 +82,15 @@ def test_prepare_for_torchserve_xgboost_image(self, mock_copy, mock_is_1p, mock_ inference_spec=None ) - self.assertEqual(secret_key, "test-secret-key") # Verify xgboost_inference.py was copied and renamed mock_rename.assert_called_once() @patch('sagemaker.serve.model_server.torchserve.prepare.compute_hash') - @patch('sagemaker.serve.model_server.torchserve.prepare.generate_secret_key') @patch('sagemaker.serve.model_server.torchserve.prepare.capture_dependencies') @patch('sagemaker.serve.model_server.torchserve.prepare.is_1p_image_uri') @patch('shutil.copy2') def test_prepare_for_torchserve_with_shared_libs(self, mock_copy, mock_is_1p, mock_capture, - mock_gen_key, mock_hash): + mock_hash): """Test prepare_for_torchserve copies shared libraries.""" from sagemaker.serve.model_server.torchserve.prepare import prepare_for_torchserve @@ 
-112,7 +105,6 @@ def test_prepare_for_torchserve_with_shared_libs(self, mock_copy, mock_is_1p, mo shared_lib.touch() mock_is_1p.return_value = False - mock_gen_key.return_value = "test-key" mock_hash.return_value = "test-hash" mock_session = Mock() @@ -149,12 +141,11 @@ def test_prepare_for_torchserve_invalid_dir(self, mock_is_1p): self.assertIn("not a valid directory", str(context.exception)) @patch('sagemaker.serve.model_server.torchserve.prepare.compute_hash') - @patch('sagemaker.serve.model_server.torchserve.prepare.generate_secret_key') @patch('sagemaker.serve.model_server.torchserve.prepare.capture_dependencies') @patch('sagemaker.serve.model_server.torchserve.prepare.is_1p_image_uri') @patch('shutil.copy2') def test_prepare_for_torchserve_no_inference_spec(self, mock_copy, mock_is_1p, mock_capture, - mock_gen_key, mock_hash): + mock_hash): """Test prepare_for_torchserve without inference_spec.""" from sagemaker.serve.model_server.torchserve.prepare import prepare_for_torchserve @@ -166,7 +157,6 @@ def test_prepare_for_torchserve_no_inference_spec(self, mock_copy, mock_is_1p, m serve_pkl.write_bytes(b"test data") mock_is_1p.return_value = False - mock_gen_key.return_value = "test-key" mock_hash.return_value = "test-hash" mock_session = Mock() @@ -179,8 +169,6 @@ def test_prepare_for_torchserve_no_inference_spec(self, mock_copy, mock_is_1p, m image_uri="test-image", inference_spec=None ) - - self.assertEqual(secret_key, "test-key") if __name__ == "__main__": diff --git a/sagemaker-serve/tests/unit/model_server/test_torchserve_server.py b/sagemaker-serve/tests/unit/model_server/test_torchserve_server.py index 95b0645076..e29d1134b3 100644 --- a/sagemaker-serve/tests/unit/model_server/test_torchserve_server.py +++ b/sagemaker-serve/tests/unit/model_server/test_torchserve_server.py @@ -32,8 +32,6 @@ def test_start_torch_serve(self, mock_path): self.assertEqual(server.container, mock_container) mock_client.containers.run.assert_called_once() call_kwargs = 
mock_client.containers.run.call_args[1] - self.assertIn("SAGEMAKER_SERVE_SECRET_KEY", call_kwargs["environment"]) - self.assertEqual(call_kwargs["environment"]["SAGEMAKER_SERVE_SECRET_KEY"], "test-secret") self.assertEqual(call_kwargs["environment"]["CUSTOM_VAR"], "value") @patch('sagemaker.serve.model_server.torchserve.server.requests.post') @@ -97,8 +95,7 @@ def test_upload_torchserve_artifacts_with_s3_path(self, mock_is_s3): ) self.assertEqual(s3_path, "s3://bucket/model") - self.assertIn("SAGEMAKER_SERVE_SECRET_KEY", env_vars) - self.assertEqual(env_vars["SAGEMAKER_SERVE_SECRET_KEY"], "test-key") + self.assertIn("SAGEMAKER_SUBMIT_DIRECTORY", env_vars) @patch('sagemaker.serve.model_server.torchserve.server.upload') @patch('sagemaker.serve.model_server.torchserve.server.determine_bucket_and_prefix') @@ -129,7 +126,7 @@ def test_upload_torchserve_artifacts_uploads_to_s3(self, mock_is_s3, mock_fw_uti ) self.assertEqual(s3_path, "s3://bucket/code_prefix/model.tar.gz") - self.assertIn("SAGEMAKER_SERVE_SECRET_KEY", env_vars) + self.assertIn("SAGEMAKER_SUBMIT_DIRECTORY", env_vars) mock_upload.assert_called_once() @patch('sagemaker.serve.model_server.torchserve.server._is_s3_uri') @@ -150,7 +147,7 @@ def test_upload_torchserve_artifacts_no_upload(self, mock_is_s3): ) self.assertIsNone(s3_path) - self.assertIn("SAGEMAKER_SERVE_SECRET_KEY", env_vars) + self.assertIn("SAGEMAKER_SUBMIT_DIRECTORY", env_vars) if __name__ == "__main__": diff --git a/sagemaker-serve/tests/unit/test_model_builder_utils_triton.py b/sagemaker-serve/tests/unit/test_model_builder_utils_triton.py index 3ac82016b6..6812993045 100644 --- a/sagemaker-serve/tests/unit/test_model_builder_utils_triton.py +++ b/sagemaker-serve/tests/unit/test_model_builder_utils_triton.py @@ -265,7 +265,7 @@ class TestHMACSignin(unittest.TestCase): """Test _compute_integrity_hash method.""" def test_compute_integrity_hash(self): - """Test HMAC signing.""" + """Test SHA-256 integrity hash computation.""" utils = 
_ModelBuilderUtils() with tempfile.TemporaryDirectory() as tmpdir: @@ -278,8 +278,7 @@ def test_compute_integrity_hash(self): utils._compute_integrity_hash() - # Secret key is generated, not mocked - self.assertIsNotNone(utils.secret_key) + # metadata.json should be created with the SHA-256 hash self.assertTrue((pkl_path / "metadata.json").exists()) diff --git a/sagemaker-serve/tests/unit/validations/test_check_integrity.py b/sagemaker-serve/tests/unit/validations/test_check_integrity.py index 11e66eb716..422101b52b 100644 --- a/sagemaker-serve/tests/unit/validations/test_check_integrity.py +++ b/sagemaker-serve/tests/unit/validations/test_check_integrity.py @@ -1,39 +1,30 @@ import unittest -import tempfile from pathlib import Path from unittest.mock import patch, mock_open from sagemaker.serve.validations.check_integrity import ( - generate_secret_key, compute_hash, perform_integrity_check ) class TestCheckIntegrity(unittest.TestCase): - def test_generate_secret_key(self): - key = generate_secret_key() - self.assertIsInstance(key, str) - self.assertEqual(len(key), 64) - - def test_generate_secret_key_custom_bytes(self): - key = generate_secret_key(nbytes=16) - self.assertEqual(len(key), 32) - def test_compute_hash(self): buffer = b"test data" - secret_key = "test_secret" - hash_value = compute_hash(buffer, secret_key) + hash_value = compute_hash(buffer) self.assertIsInstance(hash_value, str) self.assertEqual(len(hash_value), 64) def test_compute_hash_consistency(self): buffer = b"test data" - secret_key = "test_secret" - hash1 = compute_hash(buffer, secret_key) - hash2 = compute_hash(buffer, secret_key) + hash1 = compute_hash(buffer) + hash2 = compute_hash(buffer) self.assertEqual(hash1, hash2) - @patch.dict("os.environ", {"SAGEMAKER_SERVE_SECRET_KEY": "test_key"}) + def test_compute_hash_different_data(self): + hash1 = compute_hash(b"data1") + hash2 = compute_hash(b"data2") + self.assertNotEqual(hash1, hash2) + @patch("pathlib.Path.exists") 
@patch("builtins.open", new_callable=mock_open, read_data=b'{"sha256_hash": "test_hash"}') @patch("sagemaker.serve.validations.check_integrity._MetaData.from_json") @@ -41,10 +32,14 @@ def test_perform_integrity_check_failure(self, mock_metadata, mock_file, mock_ex mock_exists.return_value = True mock_meta = type("obj", (object,), {"sha256_hash": "wrong_hash"})() mock_metadata.return_value = mock_meta - + with self.assertRaises(ValueError): perform_integrity_check(b"test", Path("/tmp/metadata.json")) + def test_perform_integrity_check_missing_metadata(self): + with self.assertRaises(ValueError, msg="Path to metadata.json does not exist"): + perform_integrity_check(b"test", Path("/nonexistent/metadata.json")) + if __name__ == "__main__": unittest.main()