From b55d69fea0a2218f1218a202fd2f9577547ed5d5 Mon Sep 17 00:00:00 2001
From: Ben Simon Hartung <42031100+bentsku@users.noreply.github.com>
Date: Thu, 22 Jun 2023 14:51:02 +0300
Subject: [PATCH] S3 stream handling compatibility (#67)

---
 moto/moto_server/__init__.py        |  0
 moto/s3/models.py                   |  4 ++--
 moto/s3/responses.py                | 24 +++++++++++-------------
 tests/test_s3/test_s3_copyobject.py |  2 ++
 4 files changed, 15 insertions(+), 15 deletions(-)
 create mode 100644 moto/moto_server/__init__.py

diff --git a/moto/moto_server/__init__.py b/moto/moto_server/__init__.py
new file mode 100644
index 000000000000..e69de29bb2d1
diff --git a/moto/s3/models.py b/moto/s3/models.py
index 22f4f8f42284..ca79b78d8634 100644
--- a/moto/s3/models.py
+++ b/moto/s3/models.py
@@ -154,8 +154,9 @@ def __init__(
         )
         self._value_buffer = tempfile.SpooledTemporaryFile(self._max_buffer_size)
         self.disposed = False
-        self.value = value  # type: ignore
+        self.checksum_value = checksum_value
         self.lock = threading.Lock()
+        self.value = value  # type: ignore
 
         self.encryption = encryption
         self.kms_key_id = kms_key_id
@@ -164,7 +165,6 @@ def __init__(
         self.lock_mode = lock_mode
         self.lock_legal_status = lock_legal_status
         self.lock_until = lock_until
-        self.checksum_value = checksum_value
 
         # Default metadata values
         self._metadata["Content-Type"] = "binary/octet-stream"
diff --git a/moto/s3/responses.py b/moto/s3/responses.py
index 48c5e420ec07..463734f89512 100644
--- a/moto/s3/responses.py
+++ b/moto/s3/responses.py
@@ -1705,19 +1705,17 @@ def _key_response_put(
                     key_to_copy.version_id
                 )
 
-            # checksum stuff, do we need to compute hash of the copied object
-            checksum_algorithm = request.headers.get("x-amz-checksum-algorithm")
-            if checksum_algorithm:
-                checksum_value = compute_checksum(
-                    new_key.value, algorithm=checksum_algorithm
-                ).decode("utf-8")
-                response_headers.update(
-                    {"Checksum": {f"Checksum{checksum_algorithm}": checksum_value}}
-                )
-                # By default, the checksum-details for the copy will be the same as the original
-                # But if another algorithm is provided during the copy-operation, we override the values
-                new_key.checksum_algorithm = checksum_algorithm
-                new_key.checksum_value = checksum_value
+            # Commented out to be compatible with stream handling: we are doing it on our provider
+            # checksum_algorithm = request.headers.get("x-amz-checksum-algorithm")
+            # if checksum_algorithm:
+            #     checksum_value = compute_checksum(
+            #         new_key.value, algorithm=checksum_algorithm
+            #     ).decode("utf-8")
+            #     response_headers.update(
+            #         {"Checksum": {f"Checksum{checksum_algorithm}": checksum_value}}
+            #     )
+            #     new_key.checksum_algorithm = checksum_algorithm
+            #     new_key.checksum_value = checksum_value
 
             template = self.response_template(S3_OBJECT_COPY_RESPONSE)
             response_headers.update(new_key.response_dict)
diff --git a/tests/test_s3/test_s3_copyobject.py b/tests/test_s3/test_s3_copyobject.py
index c734d22c278f..a1423c7fa37a 100644
--- a/tests/test_s3/test_s3_copyobject.py
+++ b/tests/test_s3/test_s3_copyobject.py
@@ -37,6 +37,7 @@ def test_copy_key_boto3(key_name):
     assert resp["Body"].read() == b"some value"
 
 
+@pytest.mark.xfail(reason="Logic implemented in LocalStack S3 provider")
 @pytest.mark.aws_verified
 @s3_aws_verified
 def test_copy_key_boto3_with_args(bucket=None):
@@ -883,6 +884,7 @@ def test_copy_key_boto3_with_both_sha256_checksum(algorithm):
 
 
 @mock_aws
+@pytest.mark.xfail(reason="logic moved into LocalStack S3 provider")
 @pytest.mark.parametrize(
     "algorithm, checksum",
     [
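Context for the change above: the removed moto code computed the CopyObject checksum from new_key.value, which drains the key's SpooledTemporaryFile buffer into memory and so conflicts with streamed request handling; the patch leaves that to LocalStack's S3 provider, which can compute the checksum while the copied bytes are streamed. The sketch below is illustrative only and not part of the patch: it shows one way to compute an AWS-style (base64-encoded) checksum incrementally over chunks. The helper names, the chunk size, and the limited algorithm coverage (CRC32/SHA1/SHA256) are assumptions, not LocalStack's actual implementation.

# Illustrative sketch only -- not part of the patch above.
# Helper names, chunk size, and algorithm coverage are assumptions.
import base64
import hashlib
import zlib
from typing import BinaryIO, Iterable, Iterator


def iter_chunks(fileobj: BinaryIO, chunk_size: int = 64 * 1024) -> Iterator[bytes]:
    """Yield a file-like object in fixed-size chunks instead of reading it whole."""
    while True:
        chunk = fileobj.read(chunk_size)
        if not chunk:
            break
        yield chunk


def stream_checksum(chunks: Iterable[bytes], algorithm: str = "SHA256") -> str:
    """Return the base64-encoded checksum of a byte stream, computed incrementally."""
    if algorithm == "CRC32":
        crc = 0
        for chunk in chunks:
            crc = zlib.crc32(chunk, crc)
        return base64.b64encode(crc.to_bytes(4, "big")).decode("utf-8")
    hasher = hashlib.new(algorithm.lower())  # "SHA1" -> sha1, "SHA256" -> sha256
    for chunk in chunks:
        hasher.update(chunk)
    return base64.b64encode(hasher.digest()).decode("utf-8")


# Usage: checksum = stream_checksum(iter_chunks(open("object.bin", "rb")), "SHA256")

Folding the digest into the copy loop keeps memory usage bounded regardless of object size, which is the point of moving this logic to the streaming code path.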