import boto3
import dask.dataframe as dd
import numpy as np
import pandas as pd
import pytest
from kedro.io.core import DatasetError
from moto import mock_aws
from s3fs import S3FileSystem

from kedro_datasets.dask import CSVDataset

FILE_NAME = "*.csv"
BUCKET_NAME = "test_bucket"
AWS_CREDENTIALS = {"key": "FAKE_ACCESS_KEY", "secret": "FAKE_SECRET_KEY"}

# Pathlib cannot be used since it strips out the second slash from "s3://"
S3_PATH = f"s3://{BUCKET_NAME}/{FILE_NAME}"
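# Illustration (assumption, not part of the original change): pathlib collapses
# the scheme's double slash, e.g. str(PurePosixPath(S3_PATH)) == "s3:/test_bucket/*.csv".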


@pytest.fixture
def mocked_s3_bucket():
    """Create a bucket for testing using moto."""
    with mock_aws():
        conn = boto3.client(
            "s3",
            aws_access_key_id="fake_access_key",
            aws_secret_access_key="fake_secret_key",
        )
        conn.create_bucket(Bucket=BUCKET_NAME)
        yield conn


@pytest.fixture
def dummy_dd_dataframe() -> dd.DataFrame:
    df = pd.DataFrame(
        {"Name": ["Alex", "Bob", "Clarke", "Dave"], "Age": [31, 12, 65, 29]}
    )
    return dd.from_pandas(df, npartitions=1)


@pytest.fixture
def mocked_s3_object(tmp_path, mocked_s3_bucket, dummy_dd_dataframe: dd.DataFrame):
    """Creates test data and adds it to mocked S3 bucket."""
    pandas_df = dummy_dd_dataframe.compute()
    temporary_path = tmp_path / "test.csv"
    pandas_df.to_csv(str(temporary_path))

    mocked_s3_bucket.put_object(
        Bucket=BUCKET_NAME, Key=FILE_NAME, Body=temporary_path.read_bytes()
    )
    return mocked_s3_bucket
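

# NOTE: in kedro-datasets the ``load_args``/``save_args`` fixtures used below
# normally live in the shared tests/conftest.py; a minimal sketch is reproduced
# here (an assumption, not part of this diff) so the module reads self-contained.
# ``indirect=True`` parametrization replaces the ``None`` default per test.
@pytest.fixture(params=[None])
def load_args(request):
    return request.param


@pytest.fixture(params=[None])
def save_args(request):
    return request.param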


@pytest.fixture
def s3_dataset(load_args, save_args):
    return CSVDataset(
        filepath=S3_PATH,
        credentials=AWS_CREDENTIALS,
        load_args=load_args,
        save_args=save_args,
    )


@pytest.fixture
def s3fs_cleanup():
    # fsspec caches filesystem instances by their storage options; disabling
    # ``cachable`` after each test keeps a stale S3FileSystem (bound to an
    # already torn-down moto mock) from being reused by the next test.
    yield
    S3FileSystem.cachable = False


@pytest.mark.usefixtures("s3fs_cleanup")
class TestCSVDataset:
    def test_incorrect_credentials_load(self):
        """Test that incorrect credential keys won't instantiate dataset."""
        pattern = r"unexpected keyword argument"
        with pytest.raises(DatasetError, match=pattern):
            CSVDataset(
                filepath=S3_PATH,
                credentials={
                    "client_kwargs": {"access_token": "TOKEN", "access_key": "KEY"}
                },
            ).load().compute()

    @pytest.mark.parametrize("bad_credentials", [{"key": None, "secret": None}])
    def test_empty_credentials_load(self, bad_credentials):
        csv_dataset = CSVDataset(filepath=S3_PATH, credentials=bad_credentials)
        pattern = r"Failed while loading data from data set CSVDataset\(.+\)"
        with pytest.raises(DatasetError, match=pattern):
            csv_dataset.load().compute()

    @pytest.mark.xfail
    def test_pass_credentials(self, mocker):
        """Test that AWS credentials are passed successfully into boto3
        client instantiation on creating S3 connection."""
        client_mock = mocker.patch("botocore.session.Session.create_client")
        s3_dataset = CSVDataset(filepath=S3_PATH, credentials=AWS_CREDENTIALS)
        pattern = r"Failed while loading data from data set CSVDataset\(.+\)"
        with pytest.raises(DatasetError, match=pattern):
            s3_dataset.load().compute()

        assert client_mock.call_count == 1
        args, kwargs = client_mock.call_args_list[0]
        assert args == ("s3",)
        assert kwargs["aws_access_key_id"] == AWS_CREDENTIALS["key"]
        assert kwargs["aws_secret_access_key"] == AWS_CREDENTIALS["secret"]

    def test_save_data(self, s3_dataset, mocked_s3_bucket):
        """Test saving the data to S3."""
        pd_data = pd.DataFrame(
            {"col1": ["a", "b"], "col2": ["c", "d"], "col3": ["e", "f"]}
        )
        dd_data = dd.from_pandas(pd_data, npartitions=1)
        s3_dataset.save(dd_data)
        loaded_data = s3_dataset.load()
        assert np.array_equal(loaded_data.compute(), dd_data.compute())

    def test_load_data(self, s3_dataset, dummy_dd_dataframe, mocked_s3_object):
        """Test loading the data from S3."""
        loaded_data = s3_dataset.load()
        assert np.array_equal(loaded_data.compute(), dummy_dd_dataframe.compute())

    def test_exists(self, s3_dataset, dummy_dd_dataframe, mocked_s3_bucket):
        """Test `exists` method invocation for both existing and
        nonexistent data set."""
        assert not s3_dataset.exists()
        s3_dataset.save(dummy_dd_dataframe)
        assert s3_dataset.exists()

    def test_save_load_locally(self, tmp_path, dummy_dd_dataframe):
        """Test loading the data locally."""
        file_path = str(tmp_path / "some" / "dir" / FILE_NAME)
        dataset = CSVDataset(filepath=file_path)

        assert not dataset.exists()
        dataset.save(dummy_dd_dataframe)
        assert dataset.exists()
        loaded_data = dataset.load()
        assert dummy_dd_dataframe.compute().equals(loaded_data.compute())

    @pytest.mark.parametrize(
        "load_args", [{"k1": "v1", "index": "value"}], indirect=True
    )
    def test_load_extra_params(self, s3_dataset, load_args):
        """Test overriding the default load arguments."""
        for key, value in load_args.items():
            assert s3_dataset._load_args[key] == value

    @pytest.mark.parametrize(
        "save_args", [{"k1": "v1", "index": "value"}], indirect=True
    )
    def test_save_extra_params(self, s3_dataset, save_args):
        """Test overriding the default save arguments."""
        for key, value in save_args.items():
            assert s3_dataset._save_args[key] == value

        for key, value in s3_dataset.DEFAULT_SAVE_ARGS.items():
            assert s3_dataset._save_args[key] != value
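

# Usage sketch (assumption: standard kedro-datasets layout with test extras installed):
#   pytest tests/dask/test_csv_dataset.py -q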