feat: disable specific s3 methods for table storage buckets
felixscherz committed Jan 17, 2025
1 parent 3b5ca32 commit 6752429
Showing 4 changed files with 74 additions and 13 deletions.
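For orientation, here is a minimal sketch of the behavior this commit adds, written against the public boto3/moto surface that the new tests below also use. The bucket-name derivation from `warehouseLocation` and the expected `MethodNotAllowed` error code mirror the diff; the final `print` check is illustrative and not part of the change.

```python
import boto3
import botocore.exceptions
from moto import mock_aws


@mock_aws
def demo_table_storage_bucket_is_restricted() -> None:
    # Create a table bucket, a namespace and a table via the s3tables API.
    s3tables = boto3.client("s3tables", region_name="us-east-2")
    arn = s3tables.create_table_bucket(name="foo")["arn"]
    s3tables.create_namespace(tableBucketARN=arn, namespace=["bar"])
    s3tables.create_table(
        tableBucketARN=arn, namespace="bar", name="baz", format="ICEBERG"
    )

    # The table is backed by a hidden "--table-s3" storage bucket.
    warehouse = s3tables.get_table(tableBucketARN=arn, namespace="bar", name="baz")[
        "warehouseLocation"
    ]
    bucket_name = warehouse.replace("s3://", "")

    s3 = boto3.client("s3", region_name="us-east-2")
    s3.put_object(Bucket=bucket_name, Key="test", Body=b"{}")  # still permitted

    # Listing (and deleting) objects in that bucket is now rejected.
    try:
        s3.list_objects_v2(Bucket=bucket_name)
    except botocore.exceptions.ClientError as err:
        print(err.response["Error"]["Code"])  # expected: "MethodNotAllowed"


if __name__ == "__main__":
    demo_table_storage_bucket_is_restricted()
```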
10 changes: 10 additions & 0 deletions moto/s3/exceptions.py
@@ -635,3 +635,13 @@ def __init__(self) -> None:
"DaysMustProvidedExceptForSelectRequest",
"`Days` must be provided except for select requests",
)


class MethodNotAllowed(S3ClientError):
code = 405

def __init__(self) -> None:
super().__init__(
"MethodNotAllowed",
"The specified method is not allowed against this resource.",
)
19 changes: 14 additions & 5 deletions moto/s3/models.py
Expand Up @@ -56,6 +56,7 @@
InvalidTagError,
InvalidTargetBucketForLogging,
MalformedXML,
MethodNotAllowed,
MissingBucket,
MissingKey,
NoSuchPublicAccessBlockConfiguration,
@@ -1646,8 +1647,7 @@ def default_retention(self) -> str:
return now.strftime("%Y-%m-%dT%H:%M:%SZ")


class FakeTableStorageBucket(FakeBucket):
...
class FakeTableStorageBucket(FakeBucket): ...


class S3Backend(BaseBackend, CloudWatchMetricProvider):
@@ -1724,7 +1724,9 @@ def reset(self) -> None:
# Ensure that these TemporaryFile-objects are closed, and leave no filehandles open
#
# First, check all known buckets/keys
for bucket in itertools.chain(self.buckets.values(), self.table_buckets.values()):
for bucket in itertools.chain(
self.buckets.values(), self.table_buckets.values()
):
for key in bucket.keys.values(): # type: ignore
if isinstance(key, FakeKey):
key.dispose()
@@ -1881,7 +1883,9 @@ def create_bucket(self, bucket_name: str, region_name: str) -> FakeBucket:

return new_bucket

def create_table_storage_bucket(self, bucket_name: str, region_name: str) -> FakeTableStorageBucket:
def create_table_storage_bucket(
self, bucket_name: str, region_name: str
) -> FakeTableStorageBucket:
if bucket_name in s3_backends.bucket_accounts.keys():
raise BucketAlreadyExists(bucket=bucket_name)

Check warning (Codecov / codecov/patch): added line moto/s3/models.py#L1890 was not covered by tests.
new_bucket = FakeTableStorageBucket(
@@ -1891,7 +1895,6 @@ def create_table_storage_bucket(self, bucket_name: str, region_name: str) -> Fak
self.table_buckets[bucket_name] = new_bucket
return new_bucket


def list_buckets(self) -> List[FakeBucket]:
return list(self.buckets.values())

@@ -2676,6 +2679,8 @@ def list_objects(
MOTO_S3_DEFAULT_MAX_KEYS=5
"""
if isinstance(bucket, FakeTableStorageBucket):
raise MethodNotAllowed()

Check warning (Codecov / codecov/patch): added line moto/s3/models.py#L2683 was not covered by tests.
key_results = set()
folder_results = set()
if prefix:
@@ -2734,6 +2739,8 @@ def list_objects_v2(
MOTO_S3_DEFAULT_MAX_KEYS=5
"""
if isinstance(bucket, FakeTableStorageBucket):
raise MethodNotAllowed()
result_keys, result_folders, _, _ = self.list_objects(
bucket, prefix, delimiter, marker=None, max_keys=None
)
@@ -2816,6 +2823,8 @@ def delete_object(
bypass: bool = False,
) -> Tuple[bool, Dict[str, Any]]:
bucket = self.get_bucket(bucket_name)
if isinstance(bucket, FakeTableStorageBucket):
raise MethodNotAllowed()

response_meta = {}
delete_key = bucket.keys.get(key_name)
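The three guards above only restrict listing and deletion; `put_object`/`get_object` on the underlying bucket keep working, which the tests below rely on. As a hedged convenience (not part of the commit), a caller could detect the new error like this, assuming moto serializes the exception's code and 405 status in the usual way for `S3ClientError` subclasses:

```python
import botocore.exceptions


def is_method_not_allowed(err: botocore.exceptions.ClientError) -> bool:
    """Heuristic check for the MethodNotAllowed error raised for table storage buckets."""
    error = err.response.get("Error", {})
    status = err.response.get("ResponseMetadata", {}).get("HTTPStatusCode")
    return error.get("Code") == "MethodNotAllowed" and status == 405
```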
17 changes: 12 additions & 5 deletions moto/s3tables/models.py
@@ -108,9 +108,9 @@ def _create_underlying_bucket(self) -> FakeBucket:
hash.update(self.name.encode())
bucket_name = f"{str(uuid4())[:21]}{hash.hexdigest()[:34]}--table-s3"

bucket = s3_backends[self.account_id][self.partition].create_table_storage_bucket(
bucket_name, region_name=self.region_name
)
bucket = s3_backends[self.account_id][
self.partition
].create_table_storage_bucket(bucket_name, region_name=self.region_name)
return bucket

def update_metadata_location(
@@ -401,13 +401,20 @@ def delete_table(
self, table_bucket_arn: str, namespace: str, name: str, version_token: str
) -> None:
bucket = self.table_buckets.get(table_bucket_arn)
if not bucket or namespace not in bucket.namespaces or (name not in bucket.namespaces[namespace].tables):
if (
not bucket
or namespace not in bucket.namespaces
or (name not in bucket.namespaces[namespace].tables)
):
raise TableDoesNotExist()

Check warning (Codecov / codecov/patch): added line moto/s3tables/models.py#L409 was not covered by tests.

ns = bucket.namespaces[namespace]
table = ns.tables[name]
from moto.s3.models import s3_backends
s3_backends[self.account_id][self.partition].delete_table_storage_bucket(table._bucket.name)

s3_backends[self.account_id][self.partition].delete_table_storage_bucket(
table._bucket.name
)

ns.tables.pop(name)

41 changes: 38 additions & 3 deletions tests/test_s3tables/test_s3tables.py
@@ -224,9 +224,9 @@ def test_delete_table_deletes_underlying_table_storage():
client.create_table(
tableBucketARN=arn, namespace="bar", name="baz", format="ICEBERG"
)
warehouse = client.get_table(
tableBucketARN=arn, namespace="bar", name="baz"
)["warehouseLocation"]
warehouse = client.get_table(tableBucketARN=arn, namespace="bar", name="baz")[
"warehouseLocation"
]

bucket_name = warehouse.replace("s3://", "")
s3.head_bucket(Bucket=bucket_name)
@@ -302,3 +302,38 @@ def test_write_metadata_to_table() -> None:
bucket, key = resp["metadataLocation"].replace("s3://", "").split("/", 1)
resp = s3.get_object(Bucket=bucket, Key=key)
assert resp["Body"].read() == metadata


@mock_aws
def test_underlying_table_storage_does_not_support_list_objects() -> None:
client = boto3.client("s3tables", region_name="us-east-2")
arn = client.create_table_bucket(name="foo")["arn"]
client.create_namespace(tableBucketARN=arn, namespace=["bar"])
resp = client.create_table(
tableBucketARN=arn, namespace="bar", name="baz", format="ICEBERG"
)

resp = client.get_table(tableBucketARN=arn, namespace="bar", name="baz")
s3 = boto3.client("s3", region_name="us-east-2")

bucket_name = resp["warehouseLocation"].replace("s3://", "")
with pytest.raises(s3.exceptions.ClientError):
s3.list_objects_v2(Bucket=bucket_name)


@mock_aws
def test_underlying_table_storage_does_not_support_delete_object() -> None:
client = boto3.client("s3tables", region_name="us-east-2")
arn = client.create_table_bucket(name="foo")["arn"]
client.create_namespace(tableBucketARN=arn, namespace=["bar"])
resp = client.create_table(
tableBucketARN=arn, namespace="bar", name="baz", format="ICEBERG"
)

resp = client.get_table(tableBucketARN=arn, namespace="bar", name="baz")
s3 = boto3.client("s3", region_name="us-east-2")

bucket_name = resp["warehouseLocation"].replace("s3://", "")
s3.put_object(Bucket=bucket_name, Key="test", Body=b"{}")
with pytest.raises(s3.exceptions.ClientError):
s3.delete_object(Bucket=bucket_name, Key="test")
