Commit b55d69f
S3 stream handling compatibility (#67)
bentsku authored and viren-nadkarni committed Apr 11, 2024
1 parent 07afaef commit b55d69f
Showing 4 changed files with 15 additions and 15 deletions.
Empty file added moto/moto_server/__init__.py
4 changes: 2 additions & 2 deletions moto/s3/models.py
@@ -154,8 +154,9 @@ def __init__(
         )
         self._value_buffer = tempfile.SpooledTemporaryFile(self._max_buffer_size)
         self.disposed = False
-        self.value = value  # type: ignore
+        self.checksum_value = checksum_value
         self.lock = threading.Lock()
+        self.value = value  # type: ignore

         self.encryption = encryption
         self.kms_key_id = kms_key_id
@@ -164,7 +165,6 @@ def __init__(
         self.lock_mode = lock_mode
         self.lock_legal_status = lock_legal_status
         self.lock_until = lock_until
-        self.checksum_value = checksum_value

         # Default metadata values
         self._metadata["Content-Type"] = "binary/octet-stream"
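The two hunks above reorder assignments rather than add new state: `FakeKey.value` is a property (hence the `# type: ignore` on the assignment), so `self.value = value` runs the setter while `__init__` is still executing, and anything the setter touches, here presumably `checksum_value` and the freshly created `lock`, has to exist first. The second hunk just removes the now-duplicate `self.checksum_value = checksum_value` further down. A minimal sketch of the hazard, using a hypothetical `Key` class rather than moto's real setter logic:

from typing import Optional


class Key:
    """Hypothetical stand-in for moto's FakeKey, illustrating initialization order only."""

    def __init__(self, value: bytes, checksum_value: Optional[str] = None) -> None:
        self.checksum_value = checksum_value  # must be set before `value` below
        self.value = value  # goes through the property setter while still in __init__

    @property
    def value(self) -> bytes:
        return self._buffer

    @value.setter
    def value(self, new_value: bytes) -> None:
        self._buffer = new_value
        # Reads other instance state: if `checksum_value` were assigned after
        # `value` in __init__, this line would raise AttributeError.
        self._has_checksum = self.checksum_value is not None


key = Key(b"some value")  # works: checksum_value exists when the setter runs
assert key.value == b"some value"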
24 changes: 11 additions & 13 deletions moto/s3/responses.py
@@ -1705,19 +1705,17 @@ def _key_response_put(
                     key_to_copy.version_id
                 )

-            # checksum stuff, do we need to compute hash of the copied object
-            checksum_algorithm = request.headers.get("x-amz-checksum-algorithm")
-            if checksum_algorithm:
-                checksum_value = compute_checksum(
-                    new_key.value, algorithm=checksum_algorithm
-                ).decode("utf-8")
-                response_headers.update(
-                    {"Checksum": {f"Checksum{checksum_algorithm}": checksum_value}}
-                )
-                # By default, the checksum-details for the copy will be the same as the original
-                # But if another algorithm is provided during the copy-operation, we override the values
-                new_key.checksum_algorithm = checksum_algorithm
-                new_key.checksum_value = checksum_value
+            # Commented out to be compatible with stream handling: we are doing it on our provider
+            # checksum_algorithm = request.headers.get("x-amz-checksum-algorithm")
+            # if checksum_algorithm:
+            #     checksum_value = compute_checksum(
+            #         new_key.value, algorithm=checksum_algorithm
+            #     ).decode("utf-8")
+            #     response_headers.update(
+            #         {"Checksum": {f"Checksum{checksum_algorithm}": checksum_value}}
+            #     )
+            #     new_key.checksum_algorithm = checksum_algorithm
+            #     new_key.checksum_value = checksum_value

             template = self.response_template(S3_OBJECT_COPY_RESPONSE)
             response_headers.update(new_key.response_dict)
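The deleted block computed the checksum from `new_key.value`, i.e. from the fully buffered copy of the object, which is exactly what stream-based handling tries to avoid; per the new comment, the LocalStack S3 provider takes over that work. As a rough illustration of the streaming alternative, assuming nothing about moto's own `compute_checksum` helper, an S3-style checksum (a base64-encoded digest, as carried in headers such as `x-amz-checksum-sha256`) can be folded in chunk by chunk:

import base64
import hashlib
from typing import IO


def streaming_sha256_checksum(stream: IO[bytes], chunk_size: int = 64 * 1024) -> str:
    """Illustrative helper (not moto's compute_checksum): base64-encoded SHA-256
    of a stream, computed without holding the whole body in memory."""
    digest = hashlib.sha256()
    for chunk in iter(lambda: stream.read(chunk_size), b""):
        digest.update(chunk)
    return base64.b64encode(digest.digest()).decode("utf-8")

For example, `streaming_sha256_checksum(io.BytesIO(b"some value"))` produces the same value a client would send in `x-amz-checksum-sha256` for that body.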
2 changes: 2 additions & 0 deletions tests/test_s3/test_s3_copyobject.py
@@ -37,6 +37,7 @@ def test_copy_key_boto3(key_name):
     assert resp["Body"].read() == b"some value"


+@pytest.mark.xfail(reason="Logic implemented in LocalStack S3 provider")
 @pytest.mark.aws_verified
 @s3_aws_verified
 def test_copy_key_boto3_with_args(bucket=None):
@@ -883,6 +884,7 @@ def test_copy_key_boto3_with_both_sha256_checksum(algorithm):


 @mock_aws
+@pytest.mark.xfail(reason="logic moved into LocalStack S3 provider")
 @pytest.mark.parametrize(
     "algorithm, checksum",
     [
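Both markers use `pytest.mark.xfail`, so the affected tests still run, but an assertion failure is reported as an expected failure rather than an error; with the default `strict=False`, an unexpected pass shows up as XPASS instead of failing the run. A tiny self-contained example of the behaviour (hypothetical test, not from this suite):

import pytest


@pytest.mark.xfail(reason="Logic implemented in LocalStack S3 provider")
def test_checksum_present_on_copy():
    # Stand-in for the real CopyObject assertions; expected to fail against this fork
    # because the checksum fields are filled in by the provider, not by moto itself.
    copy_object_response = {}  # hypothetical empty response
    assert "ChecksumSHA256" in copy_object_response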
