From 09a3381b370950980685d0aa2d1292db0d9f34c9 Mon Sep 17 00:00:00 2001
From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com>
Date: Wed, 20 Nov 2024 11:48:54 -0500
Subject: [PATCH] feat: [google-cloud-datacatalog] A new method `SetConfig` is added to service `DataCatalog` (#13290)

BEGIN_COMMIT_OVERRIDE
feat: A new method `SetConfig` is added to service `DataCatalog`
feat: A new method `RetrieveConfig` is added to service `DataCatalog`
feat: A new method `RetrieveEffectiveConfig` is added to service `DataCatalog`
feat: A new field `transferred_to_dataplex` is added to message `.google.cloud.datacatalog.v1.EntryGroup`
feat: A new message `SetConfigRequest` is added
feat: A new message `RetrieveConfigRequest` is added
feat: A new message `RetrieveEffectiveConfigRequest` is added
feat: A new enum `TagTemplateMigration` is added
feat: A new enum `CatalogUIExperience` is added
feat: A new message `OrganizationConfig` is added
feat: A new message `MigrationConfig` is added
feat: A new field `dataplex_transfer_status` is added to message `.google.cloud.datacatalog.v1.Tag`
feat: A new value `TRANSFERRED` is added to enum `DataplexTransferStatus`
docs: A comment for message `EntryGroup` is changed
END_COMMIT_OVERRIDE

- [ ] Regenerate this pull request now.

feat: A new method `RetrieveConfig` is added to service `DataCatalog`
feat: A new method `RetrieveEffectiveConfig` is added to service `DataCatalog`
feat: A new field `transferred_to_dataplex` is added to message `.google.cloud.datacatalog.v1.EntryGroup`
feat: A new message `SetConfigRequest` is added
feat: A new message `RetrieveConfigRequest` is added
feat: A new message `RetrieveEffectiveConfigRequest` is added
feat: A new enum `TagTemplateMigration` is added
feat: A new enum `CatalogUIExperience` is added
feat: A new message `OrganizationConfig` is added
feat: A new message `MigrationConfig` is added
feat: A new field `dataplex_transfer_status` is added to message `.google.cloud.datacatalog.v1.Tag`
feat: A new value `TRANSFERRED` is added to enum `DataplexTransferStatus`
docs: A comment for message `EntryGroup` is changed

PiperOrigin-RevId: 698288089
Source-Link: https://github.com/googleapis/googleapis/commit/6731adf226d459dd82d3159bbe7cd0dc119f77ad
Source-Link: https://github.com/googleapis/googleapis-gen/commit/d2bda980b0e2687a78763ed639885324b389a249
Copy-Tag: eyJwIjoicGFja2FnZXMvZ29vZ2xlLWNsb3VkLWRhdGFjYXRhbG9nLy5Pd2xCb3QueWFtbCIsImgiOiJkMmJkYTk4MGIwZTI2ODdhNzg3NjNlZDYzOTg4NTMyNGIzODlhMjQ5In0=
---------
Co-authored-by: Owl Bot
---
 .../google/cloud/datacatalog/__init__.py | 14 +
 .../google/cloud/datacatalog/gapic_version.py | 2 +-
 .../google/cloud/datacatalog_v1/__init__.py | 14 +
 .../cloud/datacatalog_v1/gapic_metadata.json | 30 +
 .../cloud/datacatalog_v1/gapic_version.py | 2 +-
 .../services/data_catalog/async_client.py | 274 ++++
 .../services/data_catalog/client.py | 270 ++++
 .../services/data_catalog/transports/base.py | 44 +
 .../services/data_catalog/transports/grpc.py | 89 ++
 .../data_catalog/transports/grpc_asyncio.py | 109 ++
 .../cloud/datacatalog_v1/types/__init__.py | 14 +
 .../cloud/datacatalog_v1/types/datacatalog.py | 179 +++
 .../google/cloud/datacatalog_v1/types/tags.py | 13 +
 .../datacatalog_v1beta1/gapic_version.py | 2 +-
 ...ated_data_catalog_retrieve_config_async.py | 52 +
 ...rated_data_catalog_retrieve_config_sync.py | 52 +
 ...catalog_retrieve_effective_config_async.py | 52 +
 ..._catalog_retrieve_effective_config_sync.py | 52 +
 ...generated_data_catalog_set_config_async.py | 53 +
..._generated_data_catalog_set_config_sync.py | 53 + ..._metadata_google.cloud.datacatalog.v1.json | 461 ++++++- ...data_google.cloud.datacatalog.v1beta1.json | 2 +- .../scripts/fixup_datacatalog_v1_keywords.py | 3 + .../gapic/datacatalog_v1/test_data_catalog.py | 1191 +++++++++++++++-- 24 files changed, 2902 insertions(+), 125 deletions(-) create mode 100644 packages/google-cloud-datacatalog/samples/generated_samples/datacatalog_v1_generated_data_catalog_retrieve_config_async.py create mode 100644 packages/google-cloud-datacatalog/samples/generated_samples/datacatalog_v1_generated_data_catalog_retrieve_config_sync.py create mode 100644 packages/google-cloud-datacatalog/samples/generated_samples/datacatalog_v1_generated_data_catalog_retrieve_effective_config_async.py create mode 100644 packages/google-cloud-datacatalog/samples/generated_samples/datacatalog_v1_generated_data_catalog_retrieve_effective_config_sync.py create mode 100644 packages/google-cloud-datacatalog/samples/generated_samples/datacatalog_v1_generated_data_catalog_set_config_async.py create mode 100644 packages/google-cloud-datacatalog/samples/generated_samples/datacatalog_v1_generated_data_catalog_set_config_sync.py diff --git a/packages/google-cloud-datacatalog/google/cloud/datacatalog/__init__.py b/packages/google-cloud-datacatalog/google/cloud/datacatalog/__init__.py index da20333a8aae..c9ce4efead46 100644 --- a/packages/google-cloud-datacatalog/google/cloud/datacatalog/__init__.py +++ b/packages/google-cloud-datacatalog/google/cloud/datacatalog/__init__.py @@ -47,6 +47,7 @@ from google.cloud.datacatalog_v1.types.data_source import DataSource, StorageProperties from google.cloud.datacatalog_v1.types.datacatalog import ( BusinessContext, + CatalogUIExperience, CloudBigtableInstanceSpec, CloudBigtableSystemSpec, Contacts, @@ -83,21 +84,27 @@ ListTagsResponse, LookerSystemSpec, LookupEntryRequest, + MigrationConfig, ModelSpec, ModifyEntryContactsRequest, ModifyEntryOverviewRequest, + OrganizationConfig, ReconcileTagsMetadata, ReconcileTagsRequest, ReconcileTagsResponse, RenameTagTemplateFieldEnumValueRequest, RenameTagTemplateFieldRequest, + RetrieveConfigRequest, + RetrieveEffectiveConfigRequest, RoutineSpec, SearchCatalogRequest, SearchCatalogResponse, ServiceSpec, + SetConfigRequest, SqlDatabaseSystemSpec, StarEntryRequest, StarEntryResponse, + TagTemplateMigration, UnstarEntryRequest, UnstarEntryResponse, UpdateEntryGroupRequest, @@ -225,18 +232,23 @@ "ListTagsResponse", "LookerSystemSpec", "LookupEntryRequest", + "MigrationConfig", "ModelSpec", "ModifyEntryContactsRequest", "ModifyEntryOverviewRequest", + "OrganizationConfig", "ReconcileTagsMetadata", "ReconcileTagsRequest", "ReconcileTagsResponse", "RenameTagTemplateFieldEnumValueRequest", "RenameTagTemplateFieldRequest", + "RetrieveConfigRequest", + "RetrieveEffectiveConfigRequest", "RoutineSpec", "SearchCatalogRequest", "SearchCatalogResponse", "ServiceSpec", + "SetConfigRequest", "SqlDatabaseSystemSpec", "StarEntryRequest", "StarEntryResponse", @@ -250,7 +262,9 @@ "VertexDatasetSpec", "VertexModelSourceInfo", "VertexModelSpec", + "CatalogUIExperience", "EntryType", + "TagTemplateMigration", "DataplexExternalTable", "DataplexFilesetSpec", "DataplexSpec", diff --git a/packages/google-cloud-datacatalog/google/cloud/datacatalog/gapic_version.py b/packages/google-cloud-datacatalog/google/cloud/datacatalog/gapic_version.py index 3ed830e26f63..558c8aab67c5 100644 --- a/packages/google-cloud-datacatalog/google/cloud/datacatalog/gapic_version.py +++ 
b/packages/google-cloud-datacatalog/google/cloud/datacatalog/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "3.22.0" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1/__init__.py b/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1/__init__.py index 536da4f57ba5..da1c95fc105f 100644 --- a/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1/__init__.py +++ b/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1/__init__.py @@ -36,6 +36,7 @@ from .types.data_source import DataSource, StorageProperties from .types.datacatalog import ( BusinessContext, + CatalogUIExperience, CloudBigtableInstanceSpec, CloudBigtableSystemSpec, Contacts, @@ -72,21 +73,27 @@ ListTagsResponse, LookerSystemSpec, LookupEntryRequest, + MigrationConfig, ModelSpec, ModifyEntryContactsRequest, ModifyEntryOverviewRequest, + OrganizationConfig, ReconcileTagsMetadata, ReconcileTagsRequest, ReconcileTagsResponse, RenameTagTemplateFieldEnumValueRequest, RenameTagTemplateFieldRequest, + RetrieveConfigRequest, + RetrieveEffectiveConfigRequest, RoutineSpec, SearchCatalogRequest, SearchCatalogResponse, ServiceSpec, + SetConfigRequest, SqlDatabaseSystemSpec, StarEntryRequest, StarEntryResponse, + TagTemplateMigration, UnstarEntryRequest, UnstarEntryResponse, UpdateEntryGroupRequest, @@ -156,6 +163,7 @@ "BigQueryRoutineSpec", "BigQueryTableSpec", "BusinessContext", + "CatalogUIExperience", "CloudBigtableInstanceSpec", "CloudBigtableSystemSpec", "CloudSqlBigQueryConnectionSpec", @@ -223,9 +231,11 @@ "LookerSystemSpec", "LookupEntryRequest", "ManagingSystem", + "MigrationConfig", "ModelSpec", "ModifyEntryContactsRequest", "ModifyEntryOverviewRequest", + "OrganizationConfig", "PersonalDetails", "PhysicalSchema", "PolicyTag", @@ -237,6 +247,8 @@ "RenameTagTemplateFieldEnumValueRequest", "RenameTagTemplateFieldRequest", "ReplaceTaxonomyRequest", + "RetrieveConfigRequest", + "RetrieveEffectiveConfigRequest", "RoutineSpec", "Schema", "SearchCatalogRequest", @@ -246,6 +258,7 @@ "SerializedPolicyTag", "SerializedTaxonomy", "ServiceSpec", + "SetConfigRequest", "SqlDatabaseSystemSpec", "StarEntryRequest", "StarEntryResponse", @@ -257,6 +270,7 @@ "TagField", "TagTemplate", "TagTemplateField", + "TagTemplateMigration", "TaggedEntry", "Taxonomy", "UnstarEntryRequest", diff --git a/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1/gapic_metadata.json b/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1/gapic_metadata.json index 447d15595848..bcda1cf8ed28 100644 --- a/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1/gapic_metadata.json +++ b/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1/gapic_metadata.json @@ -130,11 +130,26 @@ "rename_tag_template_field_enum_value" ] }, + "RetrieveConfig": { + "methods": [ + "retrieve_config" + ] + }, + "RetrieveEffectiveConfig": { + "methods": [ + "retrieve_effective_config" + ] + }, "SearchCatalog": { "methods": [ "search_catalog" ] }, + "SetConfig": { + "methods": [ + "set_config" + ] + }, "SetIamPolicy": { "methods": [ "set_iam_policy" @@ -305,11 +320,26 @@ "rename_tag_template_field_enum_value" ] }, + "RetrieveConfig": { + "methods": [ + "retrieve_config" + ] + }, + "RetrieveEffectiveConfig": { + "methods": [ + "retrieve_effective_config" + ] + }, "SearchCatalog": { "methods": [ "search_catalog" ] }, + "SetConfig": { 
+ "methods": [ + "set_config" + ] + }, "SetIamPolicy": { "methods": [ "set_iam_policy" diff --git a/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1/gapic_version.py b/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1/gapic_version.py index 3ed830e26f63..558c8aab67c5 100644 --- a/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1/gapic_version.py +++ b/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "3.22.0" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1/services/data_catalog/async_client.py b/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1/services/data_catalog/async_client.py index 6785e64350d2..defe63655802 100644 --- a/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1/services/data_catalog/async_client.py +++ b/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1/services/data_catalog/async_client.py @@ -4547,6 +4547,280 @@ async def sample_import_entries(): # Done; return the response. return response + async def set_config( + self, + request: Optional[Union[datacatalog.SetConfigRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> datacatalog.MigrationConfig: + r"""Sets the configuration related to the migration to + Dataplex for an organization or project. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import datacatalog_v1 + + async def sample_set_config(): + # Create a client + client = datacatalog_v1.DataCatalogAsyncClient() + + # Initialize request argument(s) + request = datacatalog_v1.SetConfigRequest( + tag_template_migration="TAG_TEMPLATE_MIGRATION_DISABLED", + name="name_value", + ) + + # Make the request + response = await client.set_config(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.datacatalog_v1.types.SetConfigRequest, dict]]): + The request object. Request message for + [SetConfig][google.cloud.datacatalog.v1.DataCatalog.SetConfig]. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.datacatalog_v1.types.MigrationConfig: + The configuration related to the migration to Dataplex applied to an + organization or project. It is the response message + for + [SetConfig][google.cloud.datacatalog.v1.DataCatalog.SetConfig] + and + [RetrieveEffectiveConfig][google.cloud.datacatalog.v1.DataCatalog.RetrieveEffectiveConfig]. + + """ + # Create or coerce a protobuf request object. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
+ if not isinstance(request, datacatalog.SetConfigRequest): + request = datacatalog.SetConfigRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.set_config + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def retrieve_config( + self, + request: Optional[Union[datacatalog.RetrieveConfigRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> datacatalog.OrganizationConfig: + r"""Retrieves the configuration related to the migration + from Data Catalog to Dataplex for a specific + organization, including all the projects under it which + have a separate configuration set. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import datacatalog_v1 + + async def sample_retrieve_config(): + # Create a client + client = datacatalog_v1.DataCatalogAsyncClient() + + # Initialize request argument(s) + request = datacatalog_v1.RetrieveConfigRequest( + name="name_value", + ) + + # Make the request + response = await client.retrieve_config(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.datacatalog_v1.types.RetrieveConfigRequest, dict]]): + The request object. Request message for + [RetrieveConfig][google.cloud.datacatalog.v1.DataCatalog.RetrieveConfig]. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.datacatalog_v1.types.OrganizationConfig: + The configuration related to the migration from Data Catalog to Dataplex that + has been applied to an organization and any projects + under it. It is the response message for + [RetrieveConfig][google.cloud.datacatalog.v1.DataCatalog.RetrieveConfig]. + + """ + # Create or coerce a protobuf request object. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, datacatalog.RetrieveConfigRequest): + request = datacatalog.RetrieveConfigRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.retrieve_config + ] + + # Certain fields should be provided within the metadata header; + # add these here. 
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def retrieve_effective_config( + self, + request: Optional[ + Union[datacatalog.RetrieveEffectiveConfigRequest, dict] + ] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> datacatalog.MigrationConfig: + r"""Retrieves the effective configuration related to the + migration from Data Catalog to Dataplex for a specific + organization or project. If there is no specific + configuration set for the resource, the setting is + checked hierarchicahlly through the ancestors of the + resource, starting from the resource itself. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import datacatalog_v1 + + async def sample_retrieve_effective_config(): + # Create a client + client = datacatalog_v1.DataCatalogAsyncClient() + + # Initialize request argument(s) + request = datacatalog_v1.RetrieveEffectiveConfigRequest( + name="name_value", + ) + + # Make the request + response = await client.retrieve_effective_config(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.datacatalog_v1.types.RetrieveEffectiveConfigRequest, dict]]): + The request object. Request message for + [RetrieveEffectiveConfig][google.cloud.datacatalog.v1.DataCatalog.RetrieveEffectiveConfig]. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.datacatalog_v1.types.MigrationConfig: + The configuration related to the migration to Dataplex applied to an + organization or project. It is the response message + for + [SetConfig][google.cloud.datacatalog.v1.DataCatalog.SetConfig] + and + [RetrieveEffectiveConfig][google.cloud.datacatalog.v1.DataCatalog.RetrieveEffectiveConfig]. + + """ + # Create or coerce a protobuf request object. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, datacatalog.RetrieveEffectiveConfigRequest): + request = datacatalog.RetrieveEffectiveConfigRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.retrieve_effective_config + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. 
+ self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + async def list_operations( self, request: Optional[operations_pb2.ListOperationsRequest] = None, diff --git a/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1/services/data_catalog/client.py b/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1/services/data_catalog/client.py index 44b6750c9ecd..c0cee840f1c2 100644 --- a/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1/services/data_catalog/client.py +++ b/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1/services/data_catalog/client.py @@ -4964,6 +4964,276 @@ def sample_import_entries(): # Done; return the response. return response + def set_config( + self, + request: Optional[Union[datacatalog.SetConfigRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> datacatalog.MigrationConfig: + r"""Sets the configuration related to the migration to + Dataplex for an organization or project. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import datacatalog_v1 + + def sample_set_config(): + # Create a client + client = datacatalog_v1.DataCatalogClient() + + # Initialize request argument(s) + request = datacatalog_v1.SetConfigRequest( + tag_template_migration="TAG_TEMPLATE_MIGRATION_DISABLED", + name="name_value", + ) + + # Make the request + response = client.set_config(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.datacatalog_v1.types.SetConfigRequest, dict]): + The request object. Request message for + [SetConfig][google.cloud.datacatalog.v1.DataCatalog.SetConfig]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.datacatalog_v1.types.MigrationConfig: + The configuration related to the migration to Dataplex applied to an + organization or project. It is the response message + for + [SetConfig][google.cloud.datacatalog.v1.DataCatalog.SetConfig] + and + [RetrieveEffectiveConfig][google.cloud.datacatalog.v1.DataCatalog.RetrieveEffectiveConfig]. + + """ + # Create or coerce a protobuf request object. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, datacatalog.SetConfigRequest): + request = datacatalog.SetConfigRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.set_config] + + # Certain fields should be provided within the metadata header; + # add these here. 
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def retrieve_config( + self, + request: Optional[Union[datacatalog.RetrieveConfigRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> datacatalog.OrganizationConfig: + r"""Retrieves the configuration related to the migration + from Data Catalog to Dataplex for a specific + organization, including all the projects under it which + have a separate configuration set. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import datacatalog_v1 + + def sample_retrieve_config(): + # Create a client + client = datacatalog_v1.DataCatalogClient() + + # Initialize request argument(s) + request = datacatalog_v1.RetrieveConfigRequest( + name="name_value", + ) + + # Make the request + response = client.retrieve_config(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.datacatalog_v1.types.RetrieveConfigRequest, dict]): + The request object. Request message for + [RetrieveConfig][google.cloud.datacatalog.v1.DataCatalog.RetrieveConfig]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.datacatalog_v1.types.OrganizationConfig: + The configuration related to the migration from Data Catalog to Dataplex that + has been applied to an organization and any projects + under it. It is the response message for + [RetrieveConfig][google.cloud.datacatalog.v1.DataCatalog.RetrieveConfig]. + + """ + # Create or coerce a protobuf request object. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, datacatalog.RetrieveConfigRequest): + request = datacatalog.RetrieveConfigRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.retrieve_config] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def retrieve_effective_config( + self, + request: Optional[ + Union[datacatalog.RetrieveEffectiveConfigRequest, dict] + ] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> datacatalog.MigrationConfig: + r"""Retrieves the effective configuration related to the + migration from Data Catalog to Dataplex for a specific + organization or project. If there is no specific + configuration set for the resource, the setting is + checked hierarchicahlly through the ancestors of the + resource, starting from the resource itself. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import datacatalog_v1 + + def sample_retrieve_effective_config(): + # Create a client + client = datacatalog_v1.DataCatalogClient() + + # Initialize request argument(s) + request = datacatalog_v1.RetrieveEffectiveConfigRequest( + name="name_value", + ) + + # Make the request + response = client.retrieve_effective_config(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.datacatalog_v1.types.RetrieveEffectiveConfigRequest, dict]): + The request object. Request message for + [RetrieveEffectiveConfig][google.cloud.datacatalog.v1.DataCatalog.RetrieveEffectiveConfig]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.datacatalog_v1.types.MigrationConfig: + The configuration related to the migration to Dataplex applied to an + organization or project. It is the response message + for + [SetConfig][google.cloud.datacatalog.v1.DataCatalog.SetConfig] + and + [RetrieveEffectiveConfig][google.cloud.datacatalog.v1.DataCatalog.RetrieveEffectiveConfig]. + + """ + # Create or coerce a protobuf request object. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, datacatalog.RetrieveEffectiveConfigRequest): + request = datacatalog.RetrieveEffectiveConfigRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[ + self._transport.retrieve_effective_config + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + def __enter__(self) -> "DataCatalogClient": return self diff --git a/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1/services/data_catalog/transports/base.py b/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1/services/data_catalog/transports/base.py index 76010556d82d..ffbaac52f32a 100644 --- a/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1/services/data_catalog/transports/base.py +++ b/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1/services/data_catalog/transports/base.py @@ -302,6 +302,21 @@ def _prep_wrapped_messages(self, client_info): default_timeout=None, client_info=client_info, ), + self.set_config: gapic_v1.method.wrap_method( + self.set_config, + default_timeout=None, + client_info=client_info, + ), + self.retrieve_config: gapic_v1.method.wrap_method( + self.retrieve_config, + default_timeout=None, + client_info=client_info, + ), + self.retrieve_effective_config: gapic_v1.method.wrap_method( + self.retrieve_effective_config, + default_timeout=None, + client_info=client_info, + ), self.cancel_operation: gapic_v1.method.wrap_method( self.cancel_operation, default_timeout=None, @@ -651,6 +666,35 @@ def import_entries( ]: raise NotImplementedError() + @property + def set_config( + self, + ) -> Callable[ + [datacatalog.SetConfigRequest], + Union[datacatalog.MigrationConfig, Awaitable[datacatalog.MigrationConfig]], + ]: + raise NotImplementedError() + + @property + def retrieve_config( + self, + ) -> Callable[ + [datacatalog.RetrieveConfigRequest], + Union[ + datacatalog.OrganizationConfig, Awaitable[datacatalog.OrganizationConfig] + ], + ]: + raise NotImplementedError() + + @property + def retrieve_effective_config( + self, + ) -> Callable[ + [datacatalog.RetrieveEffectiveConfigRequest], + Union[datacatalog.MigrationConfig, Awaitable[datacatalog.MigrationConfig]], + ]: + raise NotImplementedError() + @property def list_operations( self, diff --git a/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1/services/data_catalog/transports/grpc.py b/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1/services/data_catalog/transports/grpc.py index 338b1b7abf79..0dbeea324ccf 100644 --- a/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1/services/data_catalog/transports/grpc.py +++ b/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1/services/data_catalog/transports/grpc.py @@ -1392,6 +1392,95 @@ def import_entries( ) return self._stubs["import_entries"] + @property + def set_config( + self, + ) -> Callable[[datacatalog.SetConfigRequest], datacatalog.MigrationConfig]: + r"""Return a callable for the set config method over gRPC. + + Sets the configuration related to the migration to + Dataplex for an organization or project. + + Returns: + Callable[[~.SetConfigRequest], + ~.MigrationConfig]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "set_config" not in self._stubs: + self._stubs["set_config"] = self.grpc_channel.unary_unary( + "/google.cloud.datacatalog.v1.DataCatalog/SetConfig", + request_serializer=datacatalog.SetConfigRequest.serialize, + response_deserializer=datacatalog.MigrationConfig.deserialize, + ) + return self._stubs["set_config"] + + @property + def retrieve_config( + self, + ) -> Callable[[datacatalog.RetrieveConfigRequest], datacatalog.OrganizationConfig]: + r"""Return a callable for the retrieve config method over gRPC. + + Retrieves the configuration related to the migration + from Data Catalog to Dataplex for a specific + organization, including all the projects under it which + have a separate configuration set. + + Returns: + Callable[[~.RetrieveConfigRequest], + ~.OrganizationConfig]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "retrieve_config" not in self._stubs: + self._stubs["retrieve_config"] = self.grpc_channel.unary_unary( + "/google.cloud.datacatalog.v1.DataCatalog/RetrieveConfig", + request_serializer=datacatalog.RetrieveConfigRequest.serialize, + response_deserializer=datacatalog.OrganizationConfig.deserialize, + ) + return self._stubs["retrieve_config"] + + @property + def retrieve_effective_config( + self, + ) -> Callable[ + [datacatalog.RetrieveEffectiveConfigRequest], datacatalog.MigrationConfig + ]: + r"""Return a callable for the retrieve effective config method over gRPC. + + Retrieves the effective configuration related to the + migration from Data Catalog to Dataplex for a specific + organization or project. If there is no specific + configuration set for the resource, the setting is + checked hierarchicahlly through the ancestors of the + resource, starting from the resource itself. + + Returns: + Callable[[~.RetrieveEffectiveConfigRequest], + ~.MigrationConfig]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "retrieve_effective_config" not in self._stubs: + self._stubs["retrieve_effective_config"] = self.grpc_channel.unary_unary( + "/google.cloud.datacatalog.v1.DataCatalog/RetrieveEffectiveConfig", + request_serializer=datacatalog.RetrieveEffectiveConfigRequest.serialize, + response_deserializer=datacatalog.MigrationConfig.deserialize, + ) + return self._stubs["retrieve_effective_config"] + def close(self): self.grpc_channel.close() diff --git a/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1/services/data_catalog/transports/grpc_asyncio.py b/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1/services/data_catalog/transports/grpc_asyncio.py index 16447f749ac4..e7411f28bd1b 100644 --- a/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1/services/data_catalog/transports/grpc_asyncio.py +++ b/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1/services/data_catalog/transports/grpc_asyncio.py @@ -1442,6 +1442,100 @@ def import_entries( ) return self._stubs["import_entries"] + @property + def set_config( + self, + ) -> Callable[ + [datacatalog.SetConfigRequest], Awaitable[datacatalog.MigrationConfig] + ]: + r"""Return a callable for the set config method over gRPC. 
+ + Sets the configuration related to the migration to + Dataplex for an organization or project. + + Returns: + Callable[[~.SetConfigRequest], + Awaitable[~.MigrationConfig]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "set_config" not in self._stubs: + self._stubs["set_config"] = self.grpc_channel.unary_unary( + "/google.cloud.datacatalog.v1.DataCatalog/SetConfig", + request_serializer=datacatalog.SetConfigRequest.serialize, + response_deserializer=datacatalog.MigrationConfig.deserialize, + ) + return self._stubs["set_config"] + + @property + def retrieve_config( + self, + ) -> Callable[ + [datacatalog.RetrieveConfigRequest], Awaitable[datacatalog.OrganizationConfig] + ]: + r"""Return a callable for the retrieve config method over gRPC. + + Retrieves the configuration related to the migration + from Data Catalog to Dataplex for a specific + organization, including all the projects under it which + have a separate configuration set. + + Returns: + Callable[[~.RetrieveConfigRequest], + Awaitable[~.OrganizationConfig]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "retrieve_config" not in self._stubs: + self._stubs["retrieve_config"] = self.grpc_channel.unary_unary( + "/google.cloud.datacatalog.v1.DataCatalog/RetrieveConfig", + request_serializer=datacatalog.RetrieveConfigRequest.serialize, + response_deserializer=datacatalog.OrganizationConfig.deserialize, + ) + return self._stubs["retrieve_config"] + + @property + def retrieve_effective_config( + self, + ) -> Callable[ + [datacatalog.RetrieveEffectiveConfigRequest], + Awaitable[datacatalog.MigrationConfig], + ]: + r"""Return a callable for the retrieve effective config method over gRPC. + + Retrieves the effective configuration related to the + migration from Data Catalog to Dataplex for a specific + organization or project. If there is no specific + configuration set for the resource, the setting is + checked hierarchicahlly through the ancestors of the + resource, starting from the resource itself. + + Returns: + Callable[[~.RetrieveEffectiveConfigRequest], + Awaitable[~.MigrationConfig]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "retrieve_effective_config" not in self._stubs: + self._stubs["retrieve_effective_config"] = self.grpc_channel.unary_unary( + "/google.cloud.datacatalog.v1.DataCatalog/RetrieveEffectiveConfig", + request_serializer=datacatalog.RetrieveEffectiveConfigRequest.serialize, + response_deserializer=datacatalog.MigrationConfig.deserialize, + ) + return self._stubs["retrieve_effective_config"] + def _prep_wrapped_messages(self, client_info): """Precompute the wrapped methods, overriding the base class method to use async wrappers.""" self._wrapped_methods = { @@ -1615,6 +1709,21 @@ def _prep_wrapped_messages(self, client_info): default_timeout=None, client_info=client_info, ), + self.set_config: self._wrap_method( + self.set_config, + default_timeout=None, + client_info=client_info, + ), + self.retrieve_config: self._wrap_method( + self.retrieve_config, + default_timeout=None, + client_info=client_info, + ), + self.retrieve_effective_config: self._wrap_method( + self.retrieve_effective_config, + default_timeout=None, + client_info=client_info, + ), self.cancel_operation: self._wrap_method( self.cancel_operation, default_timeout=None, diff --git a/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1/types/__init__.py b/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1/types/__init__.py index 51f266aa7f41..f78dad0ea274 100644 --- a/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1/types/__init__.py +++ b/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1/types/__init__.py @@ -22,6 +22,7 @@ from .data_source import DataSource, StorageProperties from .datacatalog import ( BusinessContext, + CatalogUIExperience, CloudBigtableInstanceSpec, CloudBigtableSystemSpec, Contacts, @@ -58,21 +59,27 @@ ListTagsResponse, LookerSystemSpec, LookupEntryRequest, + MigrationConfig, ModelSpec, ModifyEntryContactsRequest, ModifyEntryOverviewRequest, + OrganizationConfig, ReconcileTagsMetadata, ReconcileTagsRequest, ReconcileTagsResponse, RenameTagTemplateFieldEnumValueRequest, RenameTagTemplateFieldRequest, + RetrieveConfigRequest, + RetrieveEffectiveConfigRequest, RoutineSpec, SearchCatalogRequest, SearchCatalogResponse, ServiceSpec, + SetConfigRequest, SqlDatabaseSystemSpec, StarEntryRequest, StarEntryResponse, + TagTemplateMigration, UnstarEntryRequest, UnstarEntryResponse, UpdateEntryGroupRequest, @@ -178,18 +185,23 @@ "ListTagsResponse", "LookerSystemSpec", "LookupEntryRequest", + "MigrationConfig", "ModelSpec", "ModifyEntryContactsRequest", "ModifyEntryOverviewRequest", + "OrganizationConfig", "ReconcileTagsMetadata", "ReconcileTagsRequest", "ReconcileTagsResponse", "RenameTagTemplateFieldEnumValueRequest", "RenameTagTemplateFieldRequest", + "RetrieveConfigRequest", + "RetrieveEffectiveConfigRequest", "RoutineSpec", "SearchCatalogRequest", "SearchCatalogResponse", "ServiceSpec", + "SetConfigRequest", "SqlDatabaseSystemSpec", "StarEntryRequest", "StarEntryResponse", @@ -203,7 +215,9 @@ "VertexDatasetSpec", "VertexModelSourceInfo", "VertexModelSpec", + "CatalogUIExperience", "EntryType", + "TagTemplateMigration", "DataplexExternalTable", "DataplexFilesetSpec", "DataplexSpec", diff --git a/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1/types/datacatalog.py b/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1/types/datacatalog.py index fc838af60aad..e1b3158dc8f7 100644 --- a/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1/types/datacatalog.py +++ b/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1/types/datacatalog.py 
@@ -34,6 +34,8 @@ package="google.cloud.datacatalog.v1", manifest={ "EntryType", + "TagTemplateMigration", + "CatalogUIExperience", "SearchCatalogRequest", "SearchCatalogResponse", "CreateEntryGroupRequest", @@ -95,6 +97,11 @@ "ImportEntriesMetadata", "ModifyEntryOverviewRequest", "ModifyEntryContactsRequest", + "SetConfigRequest", + "RetrieveConfigRequest", + "RetrieveEffectiveConfigRequest", + "OrganizationConfig", + "MigrationConfig", }, ) @@ -193,6 +200,43 @@ class EntryType(proto.Enum): FEATURE_GROUP = 21 +class TagTemplateMigration(proto.Enum): + r"""Configuration related to the opt-in status for the migration + of TagTemplates to Dataplex. + + Values: + TAG_TEMPLATE_MIGRATION_UNSPECIFIED (0): + Default value. Migration of Tag Templates + from Data Catalog to Dataplex is not performed. + TAG_TEMPLATE_MIGRATION_ENABLED (1): + Migration of Tag Templates from Data Catalog + to Dataplex is enabled. + TAG_TEMPLATE_MIGRATION_DISABLED (2): + Migration of Tag Templates from Data Catalog + to Dataplex is disabled. + """ + TAG_TEMPLATE_MIGRATION_UNSPECIFIED = 0 + TAG_TEMPLATE_MIGRATION_ENABLED = 1 + TAG_TEMPLATE_MIGRATION_DISABLED = 2 + + +class CatalogUIExperience(proto.Enum): + r"""Configuration related to the opt-in status for the UI switch + to Dataplex. + + Values: + CATALOG_UI_EXPERIENCE_UNSPECIFIED (0): + Default value. The default UI is Dataplex. + CATALOG_UI_EXPERIENCE_ENABLED (1): + The UI is Dataplex. + CATALOG_UI_EXPERIENCE_DISABLED (2): + The UI is Data Catalog. + """ + CATALOG_UI_EXPERIENCE_UNSPECIFIED = 0 + CATALOG_UI_EXPERIENCE_ENABLED = 1 + CATALOG_UI_EXPERIENCE_DISABLED = 2 + + class SearchCatalogRequest(proto.Message): r"""Request message for [SearchCatalog][google.cloud.datacatalog.v1.DataCatalog.SearchCatalog]. @@ -1978,6 +2022,13 @@ class EntryGroup(proto.Message): data_catalog_timestamps (google.cloud.datacatalog_v1.types.SystemTimestamps): Output only. Timestamps of the entry group. Default value is empty. + transferred_to_dataplex (bool): + Optional. When set to [true], it means DataCatalog + EntryGroup was transferred to Dataplex Catalog Service. It + makes EntryGroup and its Entries to be read-only in + DataCatalog. However, new Tags on EntryGroup and its Entries + can be created. After setting the flag to [true] it cannot + be unset. """ name: str = proto.Field( @@ -1997,6 +2048,10 @@ class EntryGroup(proto.Message): number=4, message=timestamps.SystemTimestamps, ) + transferred_to_dataplex: bool = proto.Field( + proto.BOOL, + number=9, + ) class CreateTagTemplateRequest(proto.Message): @@ -2797,4 +2852,128 @@ class ModifyEntryContactsRequest(proto.Message): ) +class SetConfigRequest(proto.Message): + r"""Request message for + [SetConfig][google.cloud.datacatalog.v1.DataCatalog.SetConfig]. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + name (str): + Required. The organization or project whose + config is being specified. + tag_template_migration (google.cloud.datacatalog_v1.types.TagTemplateMigration): + Opt-in status for the migration of Tag + Templates to Dataplex. + + This field is a member of `oneof`_ ``configuration``. + catalog_ui_experience (google.cloud.datacatalog_v1.types.CatalogUIExperience): + Opt-in status for the UI switch to Dataplex. 
+ + This field is a member of `oneof`_ ``configuration``. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + tag_template_migration: "TagTemplateMigration" = proto.Field( + proto.ENUM, + number=2, + oneof="configuration", + enum="TagTemplateMigration", + ) + catalog_ui_experience: "CatalogUIExperience" = proto.Field( + proto.ENUM, + number=3, + oneof="configuration", + enum="CatalogUIExperience", + ) + + +class RetrieveConfigRequest(proto.Message): + r"""Request message for + [RetrieveConfig][google.cloud.datacatalog.v1.DataCatalog.RetrieveConfig]. + + Attributes: + name (str): + Required. The organization whose config is + being retrieved. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class RetrieveEffectiveConfigRequest(proto.Message): + r"""Request message for + [RetrieveEffectiveConfig][google.cloud.datacatalog.v1.DataCatalog.RetrieveEffectiveConfig]. + + Attributes: + name (str): + Required. The resource whose effective config + is being retrieved. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class OrganizationConfig(proto.Message): + r"""The configuration related to the migration from Data Catalog to + Dataplex that has been applied to an organization and any projects + under it. It is the response message for + [RetrieveConfig][google.cloud.datacatalog.v1.DataCatalog.RetrieveConfig]. + + Attributes: + config (MutableMapping[str, google.cloud.datacatalog_v1.types.MigrationConfig]): + Map of organizations and project resource names and their + configuration. The format for the map keys is + ``organizations/{organizationId}`` or + ``projects/{projectId}``. + """ + + config: MutableMapping[str, "MigrationConfig"] = proto.MapField( + proto.STRING, + proto.MESSAGE, + number=1, + message="MigrationConfig", + ) + + +class MigrationConfig(proto.Message): + r"""The configuration related to the migration to Dataplex applied to an + organization or project. It is the response message for + [SetConfig][google.cloud.datacatalog.v1.DataCatalog.SetConfig] and + [RetrieveEffectiveConfig][google.cloud.datacatalog.v1.DataCatalog.RetrieveEffectiveConfig]. + + Attributes: + tag_template_migration (google.cloud.datacatalog_v1.types.TagTemplateMigration): + Opt-in status for the migration of Tag + Templates to Dataplex. + catalog_ui_experience (google.cloud.datacatalog_v1.types.CatalogUIExperience): + Opt-in status for the UI switch to Dataplex. + """ + + tag_template_migration: "TagTemplateMigration" = proto.Field( + proto.ENUM, + number=1, + enum="TagTemplateMigration", + ) + catalog_ui_experience: "CatalogUIExperience" = proto.Field( + proto.ENUM, + number=2, + enum="CatalogUIExperience", + ) + + __all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1/types/tags.py b/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1/types/tags.py index 34eb0556571a..8d35723897b6 100644 --- a/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1/types/tags.py +++ b/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1/types/tags.py @@ -79,6 +79,9 @@ class Tag(proto.Message): Tag template defines valid field IDs. A tag must have at least 1 field and at most 500 fields. + dataplex_transfer_status (google.cloud.datacatalog_v1.types.TagTemplate.DataplexTransferStatus): + Output only. Denotes the transfer status of + the Tag Template. 
""" name: str = proto.Field( @@ -104,6 +107,11 @@ class Tag(proto.Message): number=3, message="TagField", ) + dataplex_transfer_status: "TagTemplate.DataplexTransferStatus" = proto.Field( + proto.ENUM, + number=7, + enum="TagTemplate.DataplexTransferStatus", + ) class TagField(proto.Message): @@ -288,9 +296,14 @@ class DataplexTransferStatus(proto.Enum): Deprecated: Individual TagTemplate migration is deprecated in favor of organization or project wide TagTemplate migration opt-in. + TRANSFERRED (2): + TagTemplate and its tags are auto-copied to + Dataplex service. Visible in both services. + Editable in Dataplex, read-only in DataCatalog. """ DATAPLEX_TRANSFER_STATUS_UNSPECIFIED = 0 MIGRATED = 1 + TRANSFERRED = 2 name: str = proto.Field( proto.STRING, diff --git a/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1beta1/gapic_version.py b/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1beta1/gapic_version.py index 3ed830e26f63..558c8aab67c5 100644 --- a/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1beta1/gapic_version.py +++ b/packages/google-cloud-datacatalog/google/cloud/datacatalog_v1beta1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "3.22.0" # {x-release-please-version} +__version__ = "0.0.0" # {x-release-please-version} diff --git a/packages/google-cloud-datacatalog/samples/generated_samples/datacatalog_v1_generated_data_catalog_retrieve_config_async.py b/packages/google-cloud-datacatalog/samples/generated_samples/datacatalog_v1_generated_data_catalog_retrieve_config_async.py new file mode 100644 index 000000000000..97ebff6611aa --- /dev/null +++ b/packages/google-cloud-datacatalog/samples/generated_samples/datacatalog_v1_generated_data_catalog_retrieve_config_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for RetrieveConfig +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-datacatalog + + +# [START datacatalog_v1_generated_DataCatalog_RetrieveConfig_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import datacatalog_v1 + + +async def sample_retrieve_config(): + # Create a client + client = datacatalog_v1.DataCatalogAsyncClient() + + # Initialize request argument(s) + request = datacatalog_v1.RetrieveConfigRequest( + name="name_value", + ) + + # Make the request + response = await client.retrieve_config(request=request) + + # Handle the response + print(response) + +# [END datacatalog_v1_generated_DataCatalog_RetrieveConfig_async] diff --git a/packages/google-cloud-datacatalog/samples/generated_samples/datacatalog_v1_generated_data_catalog_retrieve_config_sync.py b/packages/google-cloud-datacatalog/samples/generated_samples/datacatalog_v1_generated_data_catalog_retrieve_config_sync.py new file mode 100644 index 000000000000..1f483004ba70 --- /dev/null +++ b/packages/google-cloud-datacatalog/samples/generated_samples/datacatalog_v1_generated_data_catalog_retrieve_config_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for RetrieveConfig +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-datacatalog + + +# [START datacatalog_v1_generated_DataCatalog_RetrieveConfig_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import datacatalog_v1 + + +def sample_retrieve_config(): + # Create a client + client = datacatalog_v1.DataCatalogClient() + + # Initialize request argument(s) + request = datacatalog_v1.RetrieveConfigRequest( + name="name_value", + ) + + # Make the request + response = client.retrieve_config(request=request) + + # Handle the response + print(response) + +# [END datacatalog_v1_generated_DataCatalog_RetrieveConfig_sync] diff --git a/packages/google-cloud-datacatalog/samples/generated_samples/datacatalog_v1_generated_data_catalog_retrieve_effective_config_async.py b/packages/google-cloud-datacatalog/samples/generated_samples/datacatalog_v1_generated_data_catalog_retrieve_effective_config_async.py new file mode 100644 index 000000000000..b75db3d31336 --- /dev/null +++ b/packages/google-cloud-datacatalog/samples/generated_samples/datacatalog_v1_generated_data_catalog_retrieve_effective_config_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for RetrieveEffectiveConfig +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-datacatalog + + +# [START datacatalog_v1_generated_DataCatalog_RetrieveEffectiveConfig_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import datacatalog_v1 + + +async def sample_retrieve_effective_config(): + # Create a client + client = datacatalog_v1.DataCatalogAsyncClient() + + # Initialize request argument(s) + request = datacatalog_v1.RetrieveEffectiveConfigRequest( + name="name_value", + ) + + # Make the request + response = await client.retrieve_effective_config(request=request) + + # Handle the response + print(response) + +# [END datacatalog_v1_generated_DataCatalog_RetrieveEffectiveConfig_async] diff --git a/packages/google-cloud-datacatalog/samples/generated_samples/datacatalog_v1_generated_data_catalog_retrieve_effective_config_sync.py b/packages/google-cloud-datacatalog/samples/generated_samples/datacatalog_v1_generated_data_catalog_retrieve_effective_config_sync.py new file mode 100644 index 000000000000..529539740e75 --- /dev/null +++ b/packages/google-cloud-datacatalog/samples/generated_samples/datacatalog_v1_generated_data_catalog_retrieve_effective_config_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for RetrieveEffectiveConfig +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-datacatalog + + +# [START datacatalog_v1_generated_DataCatalog_RetrieveEffectiveConfig_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import datacatalog_v1 + + +def sample_retrieve_effective_config(): + # Create a client + client = datacatalog_v1.DataCatalogClient() + + # Initialize request argument(s) + request = datacatalog_v1.RetrieveEffectiveConfigRequest( + name="name_value", + ) + + # Make the request + response = client.retrieve_effective_config(request=request) + + # Handle the response + print(response) + +# [END datacatalog_v1_generated_DataCatalog_RetrieveEffectiveConfig_sync] diff --git a/packages/google-cloud-datacatalog/samples/generated_samples/datacatalog_v1_generated_data_catalog_set_config_async.py b/packages/google-cloud-datacatalog/samples/generated_samples/datacatalog_v1_generated_data_catalog_set_config_async.py new file mode 100644 index 000000000000..20268f628fa3 --- /dev/null +++ b/packages/google-cloud-datacatalog/samples/generated_samples/datacatalog_v1_generated_data_catalog_set_config_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for SetConfig +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-datacatalog + + +# [START datacatalog_v1_generated_DataCatalog_SetConfig_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import datacatalog_v1 + + +async def sample_set_config(): + # Create a client + client = datacatalog_v1.DataCatalogAsyncClient() + + # Initialize request argument(s) + request = datacatalog_v1.SetConfigRequest( + tag_template_migration="TAG_TEMPLATE_MIGRATION_DISABLED", + name="name_value", + ) + + # Make the request + response = await client.set_config(request=request) + + # Handle the response + print(response) + +# [END datacatalog_v1_generated_DataCatalog_SetConfig_async] diff --git a/packages/google-cloud-datacatalog/samples/generated_samples/datacatalog_v1_generated_data_catalog_set_config_sync.py b/packages/google-cloud-datacatalog/samples/generated_samples/datacatalog_v1_generated_data_catalog_set_config_sync.py new file mode 100644 index 000000000000..a65c53538b56 --- /dev/null +++ b/packages/google-cloud-datacatalog/samples/generated_samples/datacatalog_v1_generated_data_catalog_set_config_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for SetConfig +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-datacatalog + + +# [START datacatalog_v1_generated_DataCatalog_SetConfig_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import datacatalog_v1 + + +def sample_set_config(): + # Create a client + client = datacatalog_v1.DataCatalogClient() + + # Initialize request argument(s) + request = datacatalog_v1.SetConfigRequest( + tag_template_migration="TAG_TEMPLATE_MIGRATION_DISABLED", + name="name_value", + ) + + # Make the request + response = client.set_config(request=request) + + # Handle the response + print(response) + +# [END datacatalog_v1_generated_DataCatalog_SetConfig_sync] diff --git a/packages/google-cloud-datacatalog/samples/generated_samples/snippet_metadata_google.cloud.datacatalog.v1.json b/packages/google-cloud-datacatalog/samples/generated_samples/snippet_metadata_google.cloud.datacatalog.v1.json index a9896acaeca0..601d0d10214c 100644 --- a/packages/google-cloud-datacatalog/samples/generated_samples/snippet_metadata_google.cloud.datacatalog.v1.json +++ b/packages/google-cloud-datacatalog/samples/generated_samples/snippet_metadata_google.cloud.datacatalog.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-datacatalog", - "version": "3.22.0" + "version": "0.1.0" }, "snippets": [ { @@ -3917,6 +3917,312 @@ ], "title": "datacatalog_v1_generated_data_catalog_rename_tag_template_field_sync.py" }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.datacatalog_v1.DataCatalogAsyncClient", + "shortName": "DataCatalogAsyncClient" + }, + "fullName": "google.cloud.datacatalog_v1.DataCatalogAsyncClient.retrieve_config", + "method": { + "fullName": "google.cloud.datacatalog.v1.DataCatalog.RetrieveConfig", + "service": { + "fullName": "google.cloud.datacatalog.v1.DataCatalog", + "shortName": "DataCatalog" + }, + "shortName": "RetrieveConfig" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.datacatalog_v1.types.RetrieveConfigRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.datacatalog_v1.types.OrganizationConfig", + "shortName": "retrieve_config" + }, + "description": "Sample for RetrieveConfig", + "file": "datacatalog_v1_generated_data_catalog_retrieve_config_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "datacatalog_v1_generated_DataCatalog_RetrieveConfig_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "datacatalog_v1_generated_data_catalog_retrieve_config_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.datacatalog_v1.DataCatalogClient", + "shortName": "DataCatalogClient" + }, + "fullName": "google.cloud.datacatalog_v1.DataCatalogClient.retrieve_config", + "method": { + "fullName": "google.cloud.datacatalog.v1.DataCatalog.RetrieveConfig", + "service": { + "fullName": "google.cloud.datacatalog.v1.DataCatalog", + "shortName": "DataCatalog" + }, + "shortName": "RetrieveConfig" + }, + 
"parameters": [ + { + "name": "request", + "type": "google.cloud.datacatalog_v1.types.RetrieveConfigRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.datacatalog_v1.types.OrganizationConfig", + "shortName": "retrieve_config" + }, + "description": "Sample for RetrieveConfig", + "file": "datacatalog_v1_generated_data_catalog_retrieve_config_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "datacatalog_v1_generated_DataCatalog_RetrieveConfig_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "datacatalog_v1_generated_data_catalog_retrieve_config_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.datacatalog_v1.DataCatalogAsyncClient", + "shortName": "DataCatalogAsyncClient" + }, + "fullName": "google.cloud.datacatalog_v1.DataCatalogAsyncClient.retrieve_effective_config", + "method": { + "fullName": "google.cloud.datacatalog.v1.DataCatalog.RetrieveEffectiveConfig", + "service": { + "fullName": "google.cloud.datacatalog.v1.DataCatalog", + "shortName": "DataCatalog" + }, + "shortName": "RetrieveEffectiveConfig" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.datacatalog_v1.types.RetrieveEffectiveConfigRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.datacatalog_v1.types.MigrationConfig", + "shortName": "retrieve_effective_config" + }, + "description": "Sample for RetrieveEffectiveConfig", + "file": "datacatalog_v1_generated_data_catalog_retrieve_effective_config_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "datacatalog_v1_generated_DataCatalog_RetrieveEffectiveConfig_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "datacatalog_v1_generated_data_catalog_retrieve_effective_config_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.datacatalog_v1.DataCatalogClient", + "shortName": "DataCatalogClient" + }, + "fullName": "google.cloud.datacatalog_v1.DataCatalogClient.retrieve_effective_config", + "method": { + "fullName": "google.cloud.datacatalog.v1.DataCatalog.RetrieveEffectiveConfig", + "service": { + "fullName": "google.cloud.datacatalog.v1.DataCatalog", + "shortName": "DataCatalog" + }, + "shortName": "RetrieveEffectiveConfig" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.datacatalog_v1.types.RetrieveEffectiveConfigRequest" + }, + { + "name": "retry", + "type": 
"google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.datacatalog_v1.types.MigrationConfig", + "shortName": "retrieve_effective_config" + }, + "description": "Sample for RetrieveEffectiveConfig", + "file": "datacatalog_v1_generated_data_catalog_retrieve_effective_config_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "datacatalog_v1_generated_DataCatalog_RetrieveEffectiveConfig_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "datacatalog_v1_generated_data_catalog_retrieve_effective_config_sync.py" + }, { "canonical": true, "clientMethod": { @@ -4086,6 +4392,159 @@ ], "title": "datacatalog_v1_generated_data_catalog_search_catalog_sync.py" }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.datacatalog_v1.DataCatalogAsyncClient", + "shortName": "DataCatalogAsyncClient" + }, + "fullName": "google.cloud.datacatalog_v1.DataCatalogAsyncClient.set_config", + "method": { + "fullName": "google.cloud.datacatalog.v1.DataCatalog.SetConfig", + "service": { + "fullName": "google.cloud.datacatalog.v1.DataCatalog", + "shortName": "DataCatalog" + }, + "shortName": "SetConfig" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.datacatalog_v1.types.SetConfigRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.datacatalog_v1.types.MigrationConfig", + "shortName": "set_config" + }, + "description": "Sample for SetConfig", + "file": "datacatalog_v1_generated_data_catalog_set_config_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "datacatalog_v1_generated_DataCatalog_SetConfig_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "datacatalog_v1_generated_data_catalog_set_config_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.datacatalog_v1.DataCatalogClient", + "shortName": "DataCatalogClient" + }, + "fullName": "google.cloud.datacatalog_v1.DataCatalogClient.set_config", + "method": { + "fullName": "google.cloud.datacatalog.v1.DataCatalog.SetConfig", + "service": { + "fullName": "google.cloud.datacatalog.v1.DataCatalog", + "shortName": "DataCatalog" + }, + "shortName": "SetConfig" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.datacatalog_v1.types.SetConfigRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + 
"resultType": "google.cloud.datacatalog_v1.types.MigrationConfig", + "shortName": "set_config" + }, + "description": "Sample for SetConfig", + "file": "datacatalog_v1_generated_data_catalog_set_config_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "datacatalog_v1_generated_DataCatalog_SetConfig_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "datacatalog_v1_generated_data_catalog_set_config_sync.py" + }, { "canonical": true, "clientMethod": { diff --git a/packages/google-cloud-datacatalog/samples/generated_samples/snippet_metadata_google.cloud.datacatalog.v1beta1.json b/packages/google-cloud-datacatalog/samples/generated_samples/snippet_metadata_google.cloud.datacatalog.v1beta1.json index 5612cc51dcea..14e0e75feb01 100644 --- a/packages/google-cloud-datacatalog/samples/generated_samples/snippet_metadata_google.cloud.datacatalog.v1beta1.json +++ b/packages/google-cloud-datacatalog/samples/generated_samples/snippet_metadata_google.cloud.datacatalog.v1beta1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-datacatalog", - "version": "3.22.0" + "version": "0.1.0" }, "snippets": [ { diff --git a/packages/google-cloud-datacatalog/scripts/fixup_datacatalog_v1_keywords.py b/packages/google-cloud-datacatalog/scripts/fixup_datacatalog_v1_keywords.py index 3b62cccc893b..f975e4723103 100644 --- a/packages/google-cloud-datacatalog/scripts/fixup_datacatalog_v1_keywords.py +++ b/packages/google-cloud-datacatalog/scripts/fixup_datacatalog_v1_keywords.py @@ -74,7 +74,10 @@ class datacatalogCallTransformer(cst.CSTTransformer): 'rename_tag_template_field': ('name', 'new_tag_template_field_id', ), 'rename_tag_template_field_enum_value': ('name', 'new_enum_value_display_name', ), 'replace_taxonomy': ('name', 'serialized_taxonomy', ), + 'retrieve_config': ('name', ), + 'retrieve_effective_config': ('name', ), 'search_catalog': ('scope', 'query', 'page_size', 'page_token', 'order_by', 'admin_search', ), + 'set_config': ('name', 'tag_template_migration', 'catalog_ui_experience', ), 'set_iam_policy': ('resource', 'policy', 'update_mask', ), 'star_entry': ('name', ), 'test_iam_permissions': ('resource', 'permissions', ), diff --git a/packages/google-cloud-datacatalog/tests/unit/gapic/datacatalog_v1/test_data_catalog.py b/packages/google-cloud-datacatalog/tests/unit/gapic/datacatalog_v1/test_data_catalog.py index 11794b0701f9..2fa4a93c0d42 100644 --- a/packages/google-cloud-datacatalog/tests/unit/gapic/datacatalog_v1/test_data_catalog.py +++ b/packages/google-cloud-datacatalog/tests/unit/gapic/datacatalog_v1/test_data_catalog.py @@ -1544,6 +1544,7 @@ def test_create_entry_group(request_type, transport: str = "grpc"): name="name_value", display_name="display_name_value", description="description_value", + transferred_to_dataplex=True, ) response = client.create_entry_group(request) @@ -1558,6 +1559,7 @@ def test_create_entry_group(request_type, transport: str = "grpc"): assert response.name == "name_value" assert response.display_name == "display_name_value" assert response.description == "description_value" + assert response.transferred_to_dataplex is True def 
test_create_entry_group_non_empty_request_with_auto_populated_field(): @@ -1696,6 +1698,7 @@ async def test_create_entry_group_async( name="name_value", display_name="display_name_value", description="description_value", + transferred_to_dataplex=True, ) ) response = await client.create_entry_group(request) @@ -1711,6 +1714,7 @@ async def test_create_entry_group_async( assert response.name == "name_value" assert response.display_name == "display_name_value" assert response.description == "description_value" + assert response.transferred_to_dataplex is True @pytest.mark.asyncio @@ -1913,6 +1917,7 @@ def test_get_entry_group(request_type, transport: str = "grpc"): name="name_value", display_name="display_name_value", description="description_value", + transferred_to_dataplex=True, ) response = client.get_entry_group(request) @@ -1927,6 +1932,7 @@ def test_get_entry_group(request_type, transport: str = "grpc"): assert response.name == "name_value" assert response.display_name == "display_name_value" assert response.description == "description_value" + assert response.transferred_to_dataplex is True def test_get_entry_group_non_empty_request_with_auto_populated_field(): @@ -2055,6 +2061,7 @@ async def test_get_entry_group_async( name="name_value", display_name="display_name_value", description="description_value", + transferred_to_dataplex=True, ) ) response = await client.get_entry_group(request) @@ -2070,6 +2077,7 @@ async def test_get_entry_group_async( assert response.name == "name_value" assert response.display_name == "display_name_value" assert response.description == "description_value" + assert response.transferred_to_dataplex is True @pytest.mark.asyncio @@ -2256,6 +2264,7 @@ def test_update_entry_group(request_type, transport: str = "grpc"): name="name_value", display_name="display_name_value", description="description_value", + transferred_to_dataplex=True, ) response = client.update_entry_group(request) @@ -2270,6 +2279,7 @@ def test_update_entry_group(request_type, transport: str = "grpc"): assert response.name == "name_value" assert response.display_name == "display_name_value" assert response.description == "description_value" + assert response.transferred_to_dataplex is True def test_update_entry_group_non_empty_request_with_auto_populated_field(): @@ -2402,6 +2412,7 @@ async def test_update_entry_group_async( name="name_value", display_name="display_name_value", description="description_value", + transferred_to_dataplex=True, ) ) response = await client.update_entry_group(request) @@ -2417,6 +2428,7 @@ async def test_update_entry_group_async( assert response.name == "name_value" assert response.display_name == "display_name_value" assert response.description == "description_value" + assert response.transferred_to_dataplex is True @pytest.mark.asyncio @@ -9319,6 +9331,7 @@ def test_create_tag(request_type, transport: str = "grpc"): name="name_value", template="template_value", template_display_name="template_display_name_value", + dataplex_transfer_status=tags.TagTemplate.DataplexTransferStatus.MIGRATED, column="column_value", ) response = client.create_tag(request) @@ -9334,6 +9347,10 @@ def test_create_tag(request_type, transport: str = "grpc"): assert response.name == "name_value" assert response.template == "template_value" assert response.template_display_name == "template_display_name_value" + assert ( + response.dataplex_transfer_status + == tags.TagTemplate.DataplexTransferStatus.MIGRATED + ) def test_create_tag_non_empty_request_with_auto_populated_field(): @@ -9460,6 
+9477,7 @@ async def test_create_tag_async( name="name_value", template="template_value", template_display_name="template_display_name_value", + dataplex_transfer_status=tags.TagTemplate.DataplexTransferStatus.MIGRATED, ) ) response = await client.create_tag(request) @@ -9475,6 +9493,10 @@ async def test_create_tag_async( assert response.name == "name_value" assert response.template == "template_value" assert response.template_display_name == "template_display_name_value" + assert ( + response.dataplex_transfer_status + == tags.TagTemplate.DataplexTransferStatus.MIGRATED + ) @pytest.mark.asyncio @@ -9655,6 +9677,7 @@ def test_update_tag(request_type, transport: str = "grpc"): name="name_value", template="template_value", template_display_name="template_display_name_value", + dataplex_transfer_status=tags.TagTemplate.DataplexTransferStatus.MIGRATED, column="column_value", ) response = client.update_tag(request) @@ -9670,6 +9693,10 @@ def test_update_tag(request_type, transport: str = "grpc"): assert response.name == "name_value" assert response.template == "template_value" assert response.template_display_name == "template_display_name_value" + assert ( + response.dataplex_transfer_status + == tags.TagTemplate.DataplexTransferStatus.MIGRATED + ) def test_update_tag_non_empty_request_with_auto_populated_field(): @@ -9792,6 +9819,7 @@ async def test_update_tag_async( name="name_value", template="template_value", template_display_name="template_display_name_value", + dataplex_transfer_status=tags.TagTemplate.DataplexTransferStatus.MIGRATED, ) ) response = await client.update_tag(request) @@ -9807,6 +9835,10 @@ async def test_update_tag_async( assert response.name == "name_value" assert response.template == "template_value" assert response.template_display_name == "template_display_name_value" + assert ( + response.dataplex_transfer_status + == tags.TagTemplate.DataplexTransferStatus.MIGRATED + ) @pytest.mark.asyncio @@ -12873,167 +12905,930 @@ async def test_import_entries_field_headers_async(): ) in kw["metadata"] -def test_credentials_transport_error(): - # It is an error to provide credentials and a transport instance. - transport = transports.DataCatalogGrpcTransport( +@pytest.mark.parametrize( + "request_type", + [ + datacatalog.SetConfigRequest, + dict, + ], +) +def test_set_config(request_type, transport: str = "grpc"): + client = DataCatalogClient( credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) - with pytest.raises(ValueError): - client = DataCatalogClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.set_config), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = datacatalog.MigrationConfig( + tag_template_migration=datacatalog.TagTemplateMigration.TAG_TEMPLATE_MIGRATION_ENABLED, + catalog_ui_experience=datacatalog.CatalogUIExperience.CATALOG_UI_EXPERIENCE_ENABLED, ) + response = client.set_config(request) - # It is an error to provide a credentials file and a transport instance. - transport = transports.DataCatalogGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = datacatalog.SetConfigRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, datacatalog.MigrationConfig) + assert ( + response.tag_template_migration + == datacatalog.TagTemplateMigration.TAG_TEMPLATE_MIGRATION_ENABLED + ) + assert ( + response.catalog_ui_experience + == datacatalog.CatalogUIExperience.CATALOG_UI_EXPERIENCE_ENABLED ) - with pytest.raises(ValueError): - client = DataCatalogClient( - client_options={"credentials_file": "credentials.json"}, - transport=transport, - ) - # It is an error to provide an api_key and a transport instance. - transport = transports.DataCatalogGrpcTransport( + +def test_set_config_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = DataCatalogClient( credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", ) - options = client_options.ClientOptions() - options.api_key = "api_key" - with pytest.raises(ValueError): - client = DataCatalogClient( - client_options=options, - transport=transport, - ) - # It is an error to provide an api_key and a credential. - options = client_options.ClientOptions() - options.api_key = "api_key" - with pytest.raises(ValueError): - client = DataCatalogClient( - client_options=options, credentials=ga_credentials.AnonymousCredentials() + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = datacatalog.SetConfigRequest( + name="name_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.set_config), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.set_config(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == datacatalog.SetConfigRequest( + name="name_value", ) - # It is an error to provide scopes and a transport instance. - transport = transports.DataCatalogGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): + +def test_set_config_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: client = DataCatalogClient( - client_options={"scopes": ["1", "2"]}, - transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", ) + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() -def test_transport_instance(): - # A client may be instantiated with a custom transport instance. - transport = transports.DataCatalogGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - client = DataCatalogClient(transport=transport) - assert client.transport is transport - + # Ensure method has been cached + assert client._transport.set_config in client._transport._wrapped_methods -def test_transport_get_channel(): - # A client may be instantiated with a custom transport instance. 
- transport = transports.DataCatalogGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - channel = transport.grpc_channel - assert channel + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.set_config] = mock_rpc + request = {} + client.set_config(request) - transport = transports.DataCatalogGrpcAsyncIOTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - channel = transport.grpc_channel - assert channel + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + client.set_config(request) -@pytest.mark.parametrize( - "transport_class", - [ - transports.DataCatalogGrpcTransport, - transports.DataCatalogGrpcAsyncIOTransport, - ], -) -def test_transport_adc(transport_class): - # Test default credentials are used if not provided. - with mock.patch.object(google.auth, "default") as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport_class() - adc.assert_called_once() + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 -def test_transport_kind_grpc(): - transport = DataCatalogClient.get_transport_class("grpc")( - credentials=ga_credentials.AnonymousCredentials() - ) - assert transport.kind == "grpc" +@pytest.mark.asyncio +async def test_set_config_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = DataCatalogAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() -def test_initialize_client_w_grpc(): - client = DataCatalogClient( - credentials=ga_credentials.AnonymousCredentials(), transport="grpc" - ) - assert client is not None + # Ensure method has been cached + assert ( + client._client._transport.set_config + in client._client._transport._wrapped_methods + ) + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.set_config + ] = mock_rpc -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_search_catalog_empty_call_grpc(): - client = DataCatalogClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) + request = {} + await client.set_config(request) - # Mock the actual call, and fake the request. - with mock.patch.object(type(client.transport.search_catalog), "__call__") as call: - call.return_value = datacatalog.SearchCatalogResponse() - client.search_catalog(request=None) + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 - # Establish that the underlying stub method was called. 
- call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = datacatalog.SearchCatalogRequest() + await client.set_config(request) - assert args[0] == request_msg + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_create_entry_group_empty_call_grpc(): - client = DataCatalogClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", +@pytest.mark.asyncio +async def test_set_config_async( + transport: str = "grpc_asyncio", request_type=datacatalog.SetConfigRequest +): + client = DataCatalogAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, ) - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.create_entry_group), "__call__" - ) as call: - call.return_value = datacatalog.EntryGroup() - client.create_entry_group(request=None) + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() - # Establish that the underlying stub method was called. - call.assert_called() + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.set_config), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + datacatalog.MigrationConfig( + tag_template_migration=datacatalog.TagTemplateMigration.TAG_TEMPLATE_MIGRATION_ENABLED, + catalog_ui_experience=datacatalog.CatalogUIExperience.CATALOG_UI_EXPERIENCE_ENABLED, + ) + ) + response = await client.set_config(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request_msg = datacatalog.CreateEntryGroupRequest() + request = datacatalog.SetConfigRequest() + assert args[0] == request - assert args[0] == request_msg + # Establish that the response is the type that we expect. + assert isinstance(response, datacatalog.MigrationConfig) + assert ( + response.tag_template_migration + == datacatalog.TagTemplateMigration.TAG_TEMPLATE_MIGRATION_ENABLED + ) + assert ( + response.catalog_ui_experience + == datacatalog.CatalogUIExperience.CATALOG_UI_EXPERIENCE_ENABLED + ) -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_get_entry_group_empty_call_grpc(): +@pytest.mark.asyncio +async def test_set_config_async_from_dict(): + await test_set_config_async(request_type=dict) + + +def test_set_config_field_headers(): client = DataCatalogClient( credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", ) - # Mock the actual call, and fake the request. - with mock.patch.object(type(client.transport.get_entry_group), "__call__") as call: - call.return_value = datacatalog.EntryGroup() - client.get_entry_group(request=None) + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = datacatalog.SetConfigRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.set_config), "__call__") as call: + call.return_value = datacatalog.MigrationConfig() + client.set_config(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_set_config_field_headers_async(): + client = DataCatalogAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = datacatalog.SetConfigRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.set_config), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + datacatalog.MigrationConfig() + ) + await client.set_config(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.parametrize( + "request_type", + [ + datacatalog.RetrieveConfigRequest, + dict, + ], +) +def test_retrieve_config(request_type, transport: str = "grpc"): + client = DataCatalogClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.retrieve_config), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = datacatalog.OrganizationConfig() + response = client.retrieve_config(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = datacatalog.RetrieveConfigRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, datacatalog.OrganizationConfig) + + +def test_retrieve_config_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = DataCatalogClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = datacatalog.RetrieveConfigRequest( + name="name_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.retrieve_config), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client.retrieve_config(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == datacatalog.RetrieveConfigRequest( + name="name_value", + ) + + +def test_retrieve_config_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DataCatalogClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.retrieve_config in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.retrieve_config] = mock_rpc + request = {} + client.retrieve_config(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.retrieve_config(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_retrieve_config_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = DataCatalogAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.retrieve_config + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.retrieve_config + ] = mock_rpc + + request = {} + await client.retrieve_config(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.retrieve_config(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_retrieve_config_async( + transport: str = "grpc_asyncio", request_type=datacatalog.RetrieveConfigRequest +): + client = DataCatalogAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.retrieve_config), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + datacatalog.OrganizationConfig() + ) + response = await client.retrieve_config(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = datacatalog.RetrieveConfigRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, datacatalog.OrganizationConfig) + + +@pytest.mark.asyncio +async def test_retrieve_config_async_from_dict(): + await test_retrieve_config_async(request_type=dict) + + +def test_retrieve_config_field_headers(): + client = DataCatalogClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = datacatalog.RetrieveConfigRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.retrieve_config), "__call__") as call: + call.return_value = datacatalog.OrganizationConfig() + client.retrieve_config(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_retrieve_config_field_headers_async(): + client = DataCatalogAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = datacatalog.RetrieveConfigRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.retrieve_config), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + datacatalog.OrganizationConfig() + ) + await client.retrieve_config(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.parametrize( + "request_type", + [ + datacatalog.RetrieveEffectiveConfigRequest, + dict, + ], +) +def test_retrieve_effective_config(request_type, transport: str = "grpc"): + client = DataCatalogClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.retrieve_effective_config), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = datacatalog.MigrationConfig( + tag_template_migration=datacatalog.TagTemplateMigration.TAG_TEMPLATE_MIGRATION_ENABLED, + catalog_ui_experience=datacatalog.CatalogUIExperience.CATALOG_UI_EXPERIENCE_ENABLED, + ) + response = client.retrieve_effective_config(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = datacatalog.RetrieveEffectiveConfigRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, datacatalog.MigrationConfig) + assert ( + response.tag_template_migration + == datacatalog.TagTemplateMigration.TAG_TEMPLATE_MIGRATION_ENABLED + ) + assert ( + response.catalog_ui_experience + == datacatalog.CatalogUIExperience.CATALOG_UI_EXPERIENCE_ENABLED + ) + + +def test_retrieve_effective_config_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = DataCatalogClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = datacatalog.RetrieveEffectiveConfigRequest( + name="name_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.retrieve_effective_config), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.retrieve_effective_config(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == datacatalog.RetrieveEffectiveConfigRequest( + name="name_value", + ) + + +def test_retrieve_effective_config_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DataCatalogClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.retrieve_effective_config + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.retrieve_effective_config + ] = mock_rpc + request = {} + client.retrieve_effective_config(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.retrieve_effective_config(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_retrieve_effective_config_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = DataCatalogAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.retrieve_effective_config + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.retrieve_effective_config + ] = mock_rpc + + request = {} + await client.retrieve_effective_config(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.retrieve_effective_config(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_retrieve_effective_config_async( + transport: str = "grpc_asyncio", + request_type=datacatalog.RetrieveEffectiveConfigRequest, +): + client = DataCatalogAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.retrieve_effective_config), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + datacatalog.MigrationConfig( + tag_template_migration=datacatalog.TagTemplateMigration.TAG_TEMPLATE_MIGRATION_ENABLED, + catalog_ui_experience=datacatalog.CatalogUIExperience.CATALOG_UI_EXPERIENCE_ENABLED, + ) + ) + response = await client.retrieve_effective_config(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = datacatalog.RetrieveEffectiveConfigRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, datacatalog.MigrationConfig) + assert ( + response.tag_template_migration + == datacatalog.TagTemplateMigration.TAG_TEMPLATE_MIGRATION_ENABLED + ) + assert ( + response.catalog_ui_experience + == datacatalog.CatalogUIExperience.CATALOG_UI_EXPERIENCE_ENABLED + ) + + +@pytest.mark.asyncio +async def test_retrieve_effective_config_async_from_dict(): + await test_retrieve_effective_config_async(request_type=dict) + + +def test_retrieve_effective_config_field_headers(): + client = DataCatalogClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = datacatalog.RetrieveEffectiveConfigRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.retrieve_effective_config), "__call__" + ) as call: + call.return_value = datacatalog.MigrationConfig() + client.retrieve_effective_config(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_retrieve_effective_config_field_headers_async(): + client = DataCatalogAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = datacatalog.RetrieveEffectiveConfigRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.retrieve_effective_config), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + datacatalog.MigrationConfig() + ) + await client.retrieve_effective_config(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. + transport = transports.DataCatalogGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = DataCatalogClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # It is an error to provide a credentials file and a transport instance. + transport = transports.DataCatalogGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = DataCatalogClient( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) + + # It is an error to provide an api_key and a transport instance. + transport = transports.DataCatalogGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = DataCatalogClient( + client_options=options, + transport=transport, + ) + + # It is an error to provide an api_key and a credential. + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = DataCatalogClient( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + + # It is an error to provide scopes and a transport instance. + transport = transports.DataCatalogGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = DataCatalogClient( + client_options={"scopes": ["1", "2"]}, + transport=transport, + ) + + +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. 
+    transport = transports.DataCatalogGrpcTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+    client = DataCatalogClient(transport=transport)
+    assert client.transport is transport
+
+
+def test_transport_get_channel():
+    # A client may be instantiated with a custom transport instance.
+    transport = transports.DataCatalogGrpcTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+    channel = transport.grpc_channel
+    assert channel
+
+    transport = transports.DataCatalogGrpcAsyncIOTransport(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+    channel = transport.grpc_channel
+    assert channel
+
+
+@pytest.mark.parametrize(
+    "transport_class",
+    [
+        transports.DataCatalogGrpcTransport,
+        transports.DataCatalogGrpcAsyncIOTransport,
+    ],
+)
+def test_transport_adc(transport_class):
+    # Test default credentials are used if not provided.
+    with mock.patch.object(google.auth, "default") as adc:
+        adc.return_value = (ga_credentials.AnonymousCredentials(), None)
+        transport_class()
+        adc.assert_called_once()
+
+
+def test_transport_kind_grpc():
+    transport = DataCatalogClient.get_transport_class("grpc")(
+        credentials=ga_credentials.AnonymousCredentials()
+    )
+    assert transport.kind == "grpc"
+
+
+def test_initialize_client_w_grpc():
+    client = DataCatalogClient(
+        credentials=ga_credentials.AnonymousCredentials(), transport="grpc"
+    )
+    assert client is not None
+
+
+# This test is a coverage failsafe to make sure that totally empty calls,
+# i.e. request == None and no flattened fields passed, work.
+def test_search_catalog_empty_call_grpc():
+    client = DataCatalogClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="grpc",
+    )
+
+    # Mock the actual call, and fake the request.
+    with mock.patch.object(type(client.transport.search_catalog), "__call__") as call:
+        call.return_value = datacatalog.SearchCatalogResponse()
+        client.search_catalog(request=None)
+
+        # Establish that the underlying stub method was called.
+        call.assert_called()
+        _, args, _ = call.mock_calls[0]
+        request_msg = datacatalog.SearchCatalogRequest()
+
+        assert args[0] == request_msg
+
+
+# This test is a coverage failsafe to make sure that totally empty calls,
+# i.e. request == None and no flattened fields passed, work.
+def test_create_entry_group_empty_call_grpc():
+    client = DataCatalogClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="grpc",
+    )
+
+    # Mock the actual call, and fake the request.
+    with mock.patch.object(
+        type(client.transport.create_entry_group), "__call__"
+    ) as call:
+        call.return_value = datacatalog.EntryGroup()
+        client.create_entry_group(request=None)
+
+        # Establish that the underlying stub method was called.
+        call.assert_called()
+        _, args, _ = call.mock_calls[0]
+        request_msg = datacatalog.CreateEntryGroupRequest()
+
+        assert args[0] == request_msg
+
+
+# This test is a coverage failsafe to make sure that totally empty calls,
+# i.e. request == None and no flattened fields passed, work.
+def test_get_entry_group_empty_call_grpc():
+    client = DataCatalogClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="grpc",
+    )
+
+    # Mock the actual call, and fake the request.
+    with mock.patch.object(type(client.transport.get_entry_group), "__call__") as call:
+        call.return_value = datacatalog.EntryGroup()
+        client.get_entry_group(request=None)
+
+        # Establish that the underlying stub method was called.
+        call.assert_called()
@@ -13722,6 +14517,71 @@ def test_import_entries_empty_call_grpc():
         assert args[0] == request_msg


+# This test is a coverage failsafe to make sure that totally empty calls,
+# i.e. request == None and no flattened fields passed, work.
+def test_set_config_empty_call_grpc():
+    client = DataCatalogClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="grpc",
+    )
+
+    # Mock the actual call, and fake the request.
+    with mock.patch.object(type(client.transport.set_config), "__call__") as call:
+        call.return_value = datacatalog.MigrationConfig()
+        client.set_config(request=None)
+
+        # Establish that the underlying stub method was called.
+        call.assert_called()
+        _, args, _ = call.mock_calls[0]
+        request_msg = datacatalog.SetConfigRequest()
+
+        assert args[0] == request_msg
+
+
+# This test is a coverage failsafe to make sure that totally empty calls,
+# i.e. request == None and no flattened fields passed, work.
+def test_retrieve_config_empty_call_grpc():
+    client = DataCatalogClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="grpc",
+    )
+
+    # Mock the actual call, and fake the request.
+    with mock.patch.object(type(client.transport.retrieve_config), "__call__") as call:
+        call.return_value = datacatalog.OrganizationConfig()
+        client.retrieve_config(request=None)
+
+        # Establish that the underlying stub method was called.
+        call.assert_called()
+        _, args, _ = call.mock_calls[0]
+        request_msg = datacatalog.RetrieveConfigRequest()
+
+        assert args[0] == request_msg
+
+
+# This test is a coverage failsafe to make sure that totally empty calls,
+# i.e. request == None and no flattened fields passed, work.
+def test_retrieve_effective_config_empty_call_grpc():
+    client = DataCatalogClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport="grpc",
+    )
+
+    # Mock the actual call, and fake the request.
+    with mock.patch.object(
+        type(client.transport.retrieve_effective_config), "__call__"
+    ) as call:
+        call.return_value = datacatalog.MigrationConfig()
+        client.retrieve_effective_config(request=None)
+
+        # Establish that the underlying stub method was called.
+ call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = datacatalog.RetrieveEffectiveConfigRequest() + + assert args[0] == request_msg + + def test_transport_kind_grpc_asyncio(): transport = DataCatalogAsyncClient.get_transport_class("grpc_asyncio")( credentials=async_anonymous_credentials() @@ -13784,6 +14644,7 @@ async def test_create_entry_group_empty_call_grpc_asyncio(): name="name_value", display_name="display_name_value", description="description_value", + transferred_to_dataplex=True, ) ) await client.create_entry_group(request=None) @@ -13813,6 +14674,7 @@ async def test_get_entry_group_empty_call_grpc_asyncio(): name="name_value", display_name="display_name_value", description="description_value", + transferred_to_dataplex=True, ) ) await client.get_entry_group(request=None) @@ -13844,6 +14706,7 @@ async def test_update_entry_group_empty_call_grpc_asyncio(): name="name_value", display_name="display_name_value", description="description_value", + transferred_to_dataplex=True, ) ) await client.update_entry_group(request=None) @@ -14433,6 +15296,7 @@ async def test_create_tag_empty_call_grpc_asyncio(): name="name_value", template="template_value", template_display_name="template_display_name_value", + dataplex_transfer_status=tags.TagTemplate.DataplexTransferStatus.MIGRATED, ) ) await client.create_tag(request=None) @@ -14462,6 +15326,7 @@ async def test_update_tag_empty_call_grpc_asyncio(): name="name_value", template="template_value", template_display_name="template_display_name_value", + dataplex_transfer_status=tags.TagTemplate.DataplexTransferStatus.MIGRATED, ) ) await client.update_tag(request=None) @@ -14709,6 +15574,89 @@ async def test_import_entries_empty_call_grpc_asyncio(): assert args[0] == request_msg +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_set_config_empty_call_grpc_asyncio(): + client = DataCatalogAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.set_config), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + datacatalog.MigrationConfig( + tag_template_migration=datacatalog.TagTemplateMigration.TAG_TEMPLATE_MIGRATION_ENABLED, + catalog_ui_experience=datacatalog.CatalogUIExperience.CATALOG_UI_EXPERIENCE_ENABLED, + ) + ) + await client.set_config(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = datacatalog.SetConfigRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_retrieve_config_empty_call_grpc_asyncio(): + client = DataCatalogAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.retrieve_config), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + datacatalog.OrganizationConfig() + ) + await client.retrieve_config(request=None) + + # Establish that the underlying stub method was called. 
+ call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = datacatalog.RetrieveConfigRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_retrieve_effective_config_empty_call_grpc_asyncio(): + client = DataCatalogAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.retrieve_effective_config), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + datacatalog.MigrationConfig( + tag_template_migration=datacatalog.TagTemplateMigration.TAG_TEMPLATE_MIGRATION_ENABLED, + catalog_ui_experience=datacatalog.CatalogUIExperience.CATALOG_UI_EXPERIENCE_ENABLED, + ) + ) + await client.retrieve_effective_config(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = datacatalog.RetrieveEffectiveConfigRequest() + + assert args[0] == request_msg + + def test_transport_grpc_default(): # A client should use the gRPC transport by default. client = DataCatalogClient( @@ -14776,6 +15724,9 @@ def test_data_catalog_base_transport(): "get_iam_policy", "test_iam_permissions", "import_entries", + "set_config", + "retrieve_config", + "retrieve_effective_config", "get_operation", "cancel_operation", "delete_operation",