diff --git a/packages/google-cloud-dataplex/docs/dataplex_v1/business_glossary_service.rst b/packages/google-cloud-dataplex/docs/dataplex_v1/business_glossary_service.rst new file mode 100644 index 000000000000..278bc27fcd30 --- /dev/null +++ b/packages/google-cloud-dataplex/docs/dataplex_v1/business_glossary_service.rst @@ -0,0 +1,10 @@ +BusinessGlossaryService +----------------------------------------- + +.. automodule:: google.cloud.dataplex_v1.services.business_glossary_service + :members: + :inherited-members: + +.. automodule:: google.cloud.dataplex_v1.services.business_glossary_service.pagers + :members: + :inherited-members: diff --git a/packages/google-cloud-dataplex/docs/dataplex_v1/services_.rst b/packages/google-cloud-dataplex/docs/dataplex_v1/services_.rst index 4b62f5c74808..4f97a5efe7f1 100644 --- a/packages/google-cloud-dataplex/docs/dataplex_v1/services_.rst +++ b/packages/google-cloud-dataplex/docs/dataplex_v1/services_.rst @@ -3,6 +3,7 @@ Services for Google Cloud Dataplex v1 API .. 
toctree:: :maxdepth: 2 + business_glossary_service catalog_service cmek_service content_service diff --git a/packages/google-cloud-dataplex/google/cloud/dataplex/__init__.py b/packages/google-cloud-dataplex/google/cloud/dataplex/__init__.py index 9f304058606c..c72588ce32f9 100644 --- a/packages/google-cloud-dataplex/google/cloud/dataplex/__init__.py +++ b/packages/google-cloud-dataplex/google/cloud/dataplex/__init__.py @@ -18,6 +18,12 @@ __version__ = package_version.__version__ +from google.cloud.dataplex_v1.services.business_glossary_service.async_client import ( + BusinessGlossaryServiceAsyncClient, +) +from google.cloud.dataplex_v1.services.business_glossary_service.client import ( + BusinessGlossaryServiceClient, +) from google.cloud.dataplex_v1.services.catalog_service.async_client import ( CatalogServiceAsyncClient, ) @@ -59,6 +65,29 @@ MetadataServiceClient, ) from google.cloud.dataplex_v1.types.analyze import Content, Environment, Session +from google.cloud.dataplex_v1.types.business_glossary import ( + CreateGlossaryCategoryRequest, + CreateGlossaryRequest, + CreateGlossaryTermRequest, + DeleteGlossaryCategoryRequest, + DeleteGlossaryRequest, + DeleteGlossaryTermRequest, + GetGlossaryCategoryRequest, + GetGlossaryRequest, + GetGlossaryTermRequest, + Glossary, + GlossaryCategory, + GlossaryTerm, + ListGlossariesRequest, + ListGlossariesResponse, + ListGlossaryCategoriesRequest, + ListGlossaryCategoriesResponse, + ListGlossaryTermsRequest, + ListGlossaryTermsResponse, + UpdateGlossaryCategoryRequest, + UpdateGlossaryRequest, + UpdateGlossaryTermRequest, +) from google.cloud.dataplex_v1.types.catalog import ( Aspect, AspectSource, @@ -66,20 +95,24 @@ CancelMetadataJobRequest, CreateAspectTypeRequest, CreateEntryGroupRequest, + CreateEntryLinkRequest, CreateEntryRequest, CreateEntryTypeRequest, CreateMetadataJobRequest, DeleteAspectTypeRequest, DeleteEntryGroupRequest, + DeleteEntryLinkRequest, DeleteEntryRequest, DeleteEntryTypeRequest, Entry, EntryGroup, + 
EntryLink, EntrySource, EntryType, EntryView, GetAspectTypeRequest, GetEntryGroupRequest, + GetEntryLinkRequest, GetEntryRequest, GetEntryTypeRequest, GetMetadataJobRequest, @@ -180,6 +213,9 @@ RunDataScanResponse, UpdateDataScanRequest, ) +from google.cloud.dataplex_v1.types.datascans_common import ( + DataScanCatalogPublishingStatus, +) from google.cloud.dataplex_v1.types.logs import ( BusinessGlossaryEvent, DataQualityScanRuleResult, @@ -267,6 +303,8 @@ from google.cloud.dataplex_v1.types.tasks import Job, Task __all__ = ( + "BusinessGlossaryServiceClient", + "BusinessGlossaryServiceAsyncClient", "CatalogServiceClient", "CatalogServiceAsyncClient", "CmekServiceClient", @@ -284,25 +322,50 @@ "Content", "Environment", "Session", + "CreateGlossaryCategoryRequest", + "CreateGlossaryRequest", + "CreateGlossaryTermRequest", + "DeleteGlossaryCategoryRequest", + "DeleteGlossaryRequest", + "DeleteGlossaryTermRequest", + "GetGlossaryCategoryRequest", + "GetGlossaryRequest", + "GetGlossaryTermRequest", + "Glossary", + "GlossaryCategory", + "GlossaryTerm", + "ListGlossariesRequest", + "ListGlossariesResponse", + "ListGlossaryCategoriesRequest", + "ListGlossaryCategoriesResponse", + "ListGlossaryTermsRequest", + "ListGlossaryTermsResponse", + "UpdateGlossaryCategoryRequest", + "UpdateGlossaryRequest", + "UpdateGlossaryTermRequest", "Aspect", "AspectSource", "AspectType", "CancelMetadataJobRequest", "CreateAspectTypeRequest", "CreateEntryGroupRequest", + "CreateEntryLinkRequest", "CreateEntryRequest", "CreateEntryTypeRequest", "CreateMetadataJobRequest", "DeleteAspectTypeRequest", "DeleteEntryGroupRequest", + "DeleteEntryLinkRequest", "DeleteEntryRequest", "DeleteEntryTypeRequest", "Entry", "EntryGroup", + "EntryLink", "EntrySource", "EntryType", "GetAspectTypeRequest", "GetEntryGroupRequest", + "GetEntryLinkRequest", "GetEntryRequest", "GetEntryTypeRequest", "GetMetadataJobRequest", @@ -389,6 +452,7 @@ "RunDataScanResponse", "UpdateDataScanRequest", "DataScanType", + 
"DataScanCatalogPublishingStatus", "BusinessGlossaryEvent", "DataQualityScanRuleResult", "DataScanEvent", diff --git a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/__init__.py b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/__init__.py index 2b4d6bc39dfd..104b6f66fc5d 100644 --- a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/__init__.py +++ b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/__init__.py @@ -18,6 +18,10 @@ __version__ = package_version.__version__ +from .services.business_glossary_service import ( + BusinessGlossaryServiceAsyncClient, + BusinessGlossaryServiceClient, +) from .services.catalog_service import CatalogServiceAsyncClient, CatalogServiceClient from .services.cmek_service import CmekServiceAsyncClient, CmekServiceClient from .services.content_service import ContentServiceAsyncClient, ContentServiceClient @@ -32,6 +36,29 @@ from .services.dataplex_service import DataplexServiceAsyncClient, DataplexServiceClient from .services.metadata_service import MetadataServiceAsyncClient, MetadataServiceClient from .types.analyze import Content, Environment, Session +from .types.business_glossary import ( + CreateGlossaryCategoryRequest, + CreateGlossaryRequest, + CreateGlossaryTermRequest, + DeleteGlossaryCategoryRequest, + DeleteGlossaryRequest, + DeleteGlossaryTermRequest, + GetGlossaryCategoryRequest, + GetGlossaryRequest, + GetGlossaryTermRequest, + Glossary, + GlossaryCategory, + GlossaryTerm, + ListGlossariesRequest, + ListGlossariesResponse, + ListGlossaryCategoriesRequest, + ListGlossaryCategoriesResponse, + ListGlossaryTermsRequest, + ListGlossaryTermsResponse, + UpdateGlossaryCategoryRequest, + UpdateGlossaryRequest, + UpdateGlossaryTermRequest, +) from .types.catalog import ( Aspect, AspectSource, @@ -39,20 +66,24 @@ CancelMetadataJobRequest, CreateAspectTypeRequest, CreateEntryGroupRequest, + CreateEntryLinkRequest, CreateEntryRequest, CreateEntryTypeRequest, CreateMetadataJobRequest, 
DeleteAspectTypeRequest, DeleteEntryGroupRequest, + DeleteEntryLinkRequest, DeleteEntryRequest, DeleteEntryTypeRequest, Entry, EntryGroup, + EntryLink, EntrySource, EntryType, EntryView, GetAspectTypeRequest, GetEntryGroupRequest, + GetEntryLinkRequest, GetEntryRequest, GetEntryTypeRequest, GetMetadataJobRequest, @@ -147,6 +178,7 @@ RunDataScanResponse, UpdateDataScanRequest, ) +from .types.datascans_common import DataScanCatalogPublishingStatus from .types.logs import ( BusinessGlossaryEvent, DataQualityScanRuleResult, @@ -227,6 +259,7 @@ from .types.tasks import Job, Task __all__ = ( + "BusinessGlossaryServiceAsyncClient", "CatalogServiceAsyncClient", "CmekServiceAsyncClient", "ContentServiceAsyncClient", @@ -241,6 +274,7 @@ "Asset", "AssetStatus", "BusinessGlossaryEvent", + "BusinessGlossaryServiceClient", "CancelJobRequest", "CancelMetadataJobRequest", "CatalogServiceClient", @@ -257,9 +291,13 @@ "CreateEncryptionConfigRequest", "CreateEntityRequest", "CreateEntryGroupRequest", + "CreateEntryLinkRequest", "CreateEntryRequest", "CreateEntryTypeRequest", "CreateEnvironmentRequest", + "CreateGlossaryCategoryRequest", + "CreateGlossaryRequest", + "CreateGlossaryTermRequest", "CreateLakeRequest", "CreateMetadataJobRequest", "CreatePartitionRequest", @@ -281,6 +319,7 @@ "DataQualityScanRuleResult", "DataQualitySpec", "DataScan", + "DataScanCatalogPublishingStatus", "DataScanEvent", "DataScanJob", "DataScanServiceClient", @@ -299,9 +338,13 @@ "DeleteEncryptionConfigRequest", "DeleteEntityRequest", "DeleteEntryGroupRequest", + "DeleteEntryLinkRequest", "DeleteEntryRequest", "DeleteEntryTypeRequest", "DeleteEnvironmentRequest", + "DeleteGlossaryCategoryRequest", + "DeleteGlossaryRequest", + "DeleteGlossaryTermRequest", "DeleteLakeRequest", "DeletePartitionRequest", "DeleteTaskRequest", @@ -311,6 +354,7 @@ "Entity", "Entry", "EntryGroup", + "EntryLink", "EntryLinkEvent", "EntrySource", "EntryType", @@ -329,15 +373,22 @@ "GetEncryptionConfigRequest", "GetEntityRequest", 
"GetEntryGroupRequest", + "GetEntryLinkRequest", "GetEntryRequest", "GetEntryTypeRequest", "GetEnvironmentRequest", + "GetGlossaryCategoryRequest", + "GetGlossaryRequest", + "GetGlossaryTermRequest", "GetJobRequest", "GetLakeRequest", "GetMetadataJobRequest", "GetPartitionRequest", "GetTaskRequest", "GetZoneRequest", + "Glossary", + "GlossaryCategory", + "GlossaryTerm", "GovernanceEvent", "ImportItem", "Job", @@ -373,6 +424,12 @@ "ListEntryTypesResponse", "ListEnvironmentsRequest", "ListEnvironmentsResponse", + "ListGlossariesRequest", + "ListGlossariesResponse", + "ListGlossaryCategoriesRequest", + "ListGlossaryCategoriesResponse", + "ListGlossaryTermsRequest", + "ListGlossaryTermsResponse", "ListJobsRequest", "ListJobsResponse", "ListLakeActionsRequest", @@ -426,6 +483,9 @@ "UpdateEntryRequest", "UpdateEntryTypeRequest", "UpdateEnvironmentRequest", + "UpdateGlossaryCategoryRequest", + "UpdateGlossaryRequest", + "UpdateGlossaryTermRequest", "UpdateLakeRequest", "UpdateTaskRequest", "UpdateZoneRequest", diff --git a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/gapic_metadata.json b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/gapic_metadata.json index f9cbea29b093..dd1090ea5c40 100644 --- a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/gapic_metadata.json +++ b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/gapic_metadata.json @@ -5,6 +5,250 @@ "protoPackage": "google.cloud.dataplex.v1", "schema": "1.0", "services": { + "BusinessGlossaryService": { + "clients": { + "grpc": { + "libraryClient": "BusinessGlossaryServiceClient", + "rpcs": { + "CreateGlossary": { + "methods": [ + "create_glossary" + ] + }, + "CreateGlossaryCategory": { + "methods": [ + "create_glossary_category" + ] + }, + "CreateGlossaryTerm": { + "methods": [ + "create_glossary_term" + ] + }, + "DeleteGlossary": { + "methods": [ + "delete_glossary" + ] + }, + "DeleteGlossaryCategory": { + "methods": [ + "delete_glossary_category" + ] + }, + 
"DeleteGlossaryTerm": { + "methods": [ + "delete_glossary_term" + ] + }, + "GetGlossary": { + "methods": [ + "get_glossary" + ] + }, + "GetGlossaryCategory": { + "methods": [ + "get_glossary_category" + ] + }, + "GetGlossaryTerm": { + "methods": [ + "get_glossary_term" + ] + }, + "ListGlossaries": { + "methods": [ + "list_glossaries" + ] + }, + "ListGlossaryCategories": { + "methods": [ + "list_glossary_categories" + ] + }, + "ListGlossaryTerms": { + "methods": [ + "list_glossary_terms" + ] + }, + "UpdateGlossary": { + "methods": [ + "update_glossary" + ] + }, + "UpdateGlossaryCategory": { + "methods": [ + "update_glossary_category" + ] + }, + "UpdateGlossaryTerm": { + "methods": [ + "update_glossary_term" + ] + } + } + }, + "grpc-async": { + "libraryClient": "BusinessGlossaryServiceAsyncClient", + "rpcs": { + "CreateGlossary": { + "methods": [ + "create_glossary" + ] + }, + "CreateGlossaryCategory": { + "methods": [ + "create_glossary_category" + ] + }, + "CreateGlossaryTerm": { + "methods": [ + "create_glossary_term" + ] + }, + "DeleteGlossary": { + "methods": [ + "delete_glossary" + ] + }, + "DeleteGlossaryCategory": { + "methods": [ + "delete_glossary_category" + ] + }, + "DeleteGlossaryTerm": { + "methods": [ + "delete_glossary_term" + ] + }, + "GetGlossary": { + "methods": [ + "get_glossary" + ] + }, + "GetGlossaryCategory": { + "methods": [ + "get_glossary_category" + ] + }, + "GetGlossaryTerm": { + "methods": [ + "get_glossary_term" + ] + }, + "ListGlossaries": { + "methods": [ + "list_glossaries" + ] + }, + "ListGlossaryCategories": { + "methods": [ + "list_glossary_categories" + ] + }, + "ListGlossaryTerms": { + "methods": [ + "list_glossary_terms" + ] + }, + "UpdateGlossary": { + "methods": [ + "update_glossary" + ] + }, + "UpdateGlossaryCategory": { + "methods": [ + "update_glossary_category" + ] + }, + "UpdateGlossaryTerm": { + "methods": [ + "update_glossary_term" + ] + } + } + }, + "rest": { + "libraryClient": "BusinessGlossaryServiceClient", + 
"rpcs": { + "CreateGlossary": { + "methods": [ + "create_glossary" + ] + }, + "CreateGlossaryCategory": { + "methods": [ + "create_glossary_category" + ] + }, + "CreateGlossaryTerm": { + "methods": [ + "create_glossary_term" + ] + }, + "DeleteGlossary": { + "methods": [ + "delete_glossary" + ] + }, + "DeleteGlossaryCategory": { + "methods": [ + "delete_glossary_category" + ] + }, + "DeleteGlossaryTerm": { + "methods": [ + "delete_glossary_term" + ] + }, + "GetGlossary": { + "methods": [ + "get_glossary" + ] + }, + "GetGlossaryCategory": { + "methods": [ + "get_glossary_category" + ] + }, + "GetGlossaryTerm": { + "methods": [ + "get_glossary_term" + ] + }, + "ListGlossaries": { + "methods": [ + "list_glossaries" + ] + }, + "ListGlossaryCategories": { + "methods": [ + "list_glossary_categories" + ] + }, + "ListGlossaryTerms": { + "methods": [ + "list_glossary_terms" + ] + }, + "UpdateGlossary": { + "methods": [ + "update_glossary" + ] + }, + "UpdateGlossaryCategory": { + "methods": [ + "update_glossary_category" + ] + }, + "UpdateGlossaryTerm": { + "methods": [ + "update_glossary_term" + ] + } + } + } + } + }, "CatalogService": { "clients": { "grpc": { @@ -30,6 +274,11 @@ "create_entry_group" ] }, + "CreateEntryLink": { + "methods": [ + "create_entry_link" + ] + }, "CreateEntryType": { "methods": [ "create_entry_type" @@ -55,6 +304,11 @@ "delete_entry_group" ] }, + "DeleteEntryLink": { + "methods": [ + "delete_entry_link" + ] + }, "DeleteEntryType": { "methods": [ "delete_entry_type" @@ -75,6 +329,11 @@ "get_entry_group" ] }, + "GetEntryLink": { + "methods": [ + "get_entry_link" + ] + }, "GetEntryType": { "methods": [ "get_entry_type" @@ -165,6 +424,11 @@ "create_entry_group" ] }, + "CreateEntryLink": { + "methods": [ + "create_entry_link" + ] + }, "CreateEntryType": { "methods": [ "create_entry_type" @@ -190,6 +454,11 @@ "delete_entry_group" ] }, + "DeleteEntryLink": { + "methods": [ + "delete_entry_link" + ] + }, "DeleteEntryType": { "methods": [ 
"delete_entry_type" @@ -210,6 +479,11 @@ "get_entry_group" ] }, + "GetEntryLink": { + "methods": [ + "get_entry_link" + ] + }, "GetEntryType": { "methods": [ "get_entry_type" @@ -300,6 +574,11 @@ "create_entry_group" ] }, + "CreateEntryLink": { + "methods": [ + "create_entry_link" + ] + }, "CreateEntryType": { "methods": [ "create_entry_type" @@ -325,6 +604,11 @@ "delete_entry_group" ] }, + "DeleteEntryLink": { + "methods": [ + "delete_entry_link" + ] + }, "DeleteEntryType": { "methods": [ "delete_entry_type" @@ -345,6 +629,11 @@ "get_entry_group" ] }, + "GetEntryLink": { + "methods": [ + "get_entry_link" + ] + }, "GetEntryType": { "methods": [ "get_entry_type" diff --git a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/business_glossary_service/__init__.py b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/business_glossary_service/__init__.py new file mode 100644 index 000000000000..6adb2000e4a8 --- /dev/null +++ b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/business_glossary_service/__init__.py @@ -0,0 +1,22 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from .async_client import BusinessGlossaryServiceAsyncClient +from .client import BusinessGlossaryServiceClient + +__all__ = ( + "BusinessGlossaryServiceClient", + "BusinessGlossaryServiceAsyncClient", +) diff --git a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/business_glossary_service/async_client.py b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/business_glossary_service/async_client.py new file mode 100644 index 000000000000..31f7db5c5c13 --- /dev/null +++ b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/business_glossary_service/async_client.py @@ -0,0 +1,2573 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from collections import OrderedDict +import logging as std_logging +import re +from typing import ( + Callable, + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, +) + +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry_async as retries +from google.api_core.client_options import ClientOptions +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore +import google.protobuf + +from google.cloud.dataplex_v1 import gapic_version as package_version + +try: + OptionalRetry = Union[retries.AsyncRetry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.AsyncRetry, object, None] # type: ignore + +from google.api_core import operation # type: ignore +from google.api_core import operation_async # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore + +from google.cloud.dataplex_v1.services.business_glossary_service import pagers +from google.cloud.dataplex_v1.types import business_glossary, service + +from .client import BusinessGlossaryServiceClient +from .transports.base import DEFAULT_CLIENT_INFO, BusinessGlossaryServiceTransport +from .transports.grpc_asyncio import BusinessGlossaryServiceGrpcAsyncIOTransport + +try: + from google.api_core import client_logging # type: ignore + + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER +except ImportError: # pragma: NO COVER + CLIENT_LOGGING_SUPPORTED = False + +_LOGGER = 
std_logging.getLogger(__name__) + + +class BusinessGlossaryServiceAsyncClient: + """BusinessGlossaryService provides APIs for managing business + glossary resources for enterprise customers. + The resources currently supported in Business Glossary are: + + 1. Glossary + 2. GlossaryCategory + 3. GlossaryTerm + """ + + _client: BusinessGlossaryServiceClient + + # Copy defaults from the synchronous client for use here. + # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead. + DEFAULT_ENDPOINT = BusinessGlossaryServiceClient.DEFAULT_ENDPOINT + DEFAULT_MTLS_ENDPOINT = BusinessGlossaryServiceClient.DEFAULT_MTLS_ENDPOINT + _DEFAULT_ENDPOINT_TEMPLATE = ( + BusinessGlossaryServiceClient._DEFAULT_ENDPOINT_TEMPLATE + ) + _DEFAULT_UNIVERSE = BusinessGlossaryServiceClient._DEFAULT_UNIVERSE + + glossary_path = staticmethod(BusinessGlossaryServiceClient.glossary_path) + parse_glossary_path = staticmethod( + BusinessGlossaryServiceClient.parse_glossary_path + ) + glossary_category_path = staticmethod( + BusinessGlossaryServiceClient.glossary_category_path + ) + parse_glossary_category_path = staticmethod( + BusinessGlossaryServiceClient.parse_glossary_category_path + ) + glossary_term_path = staticmethod(BusinessGlossaryServiceClient.glossary_term_path) + parse_glossary_term_path = staticmethod( + BusinessGlossaryServiceClient.parse_glossary_term_path + ) + common_billing_account_path = staticmethod( + BusinessGlossaryServiceClient.common_billing_account_path + ) + parse_common_billing_account_path = staticmethod( + BusinessGlossaryServiceClient.parse_common_billing_account_path + ) + common_folder_path = staticmethod(BusinessGlossaryServiceClient.common_folder_path) + parse_common_folder_path = staticmethod( + BusinessGlossaryServiceClient.parse_common_folder_path + ) + common_organization_path = staticmethod( + BusinessGlossaryServiceClient.common_organization_path + ) + parse_common_organization_path = staticmethod( + 
BusinessGlossaryServiceClient.parse_common_organization_path + ) + common_project_path = staticmethod( + BusinessGlossaryServiceClient.common_project_path + ) + parse_common_project_path = staticmethod( + BusinessGlossaryServiceClient.parse_common_project_path + ) + common_location_path = staticmethod( + BusinessGlossaryServiceClient.common_location_path + ) + parse_common_location_path = staticmethod( + BusinessGlossaryServiceClient.parse_common_location_path + ) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + BusinessGlossaryServiceAsyncClient: The constructed client. + """ + return BusinessGlossaryServiceClient.from_service_account_info.__func__(BusinessGlossaryServiceAsyncClient, info, *args, **kwargs) # type: ignore + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + BusinessGlossaryServiceAsyncClient: The constructed client. + """ + return BusinessGlossaryServiceClient.from_service_account_file.__func__(BusinessGlossaryServiceAsyncClient, filename, *args, **kwargs) # type: ignore + + from_service_account_json = from_service_account_file + + @classmethod + def get_mtls_endpoint_and_cert_source( + cls, client_options: Optional[ClientOptions] = None + ): + """Return the API endpoint and client cert source for mutual TLS. 
+ + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. + (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. + """ + return BusinessGlossaryServiceClient.get_mtls_endpoint_and_cert_source(client_options) # type: ignore + + @property + def transport(self) -> BusinessGlossaryServiceTransport: + """Returns the transport used by the client instance. + + Returns: + BusinessGlossaryServiceTransport: The transport used by the client instance. + """ + return self._client.transport + + @property + def api_endpoint(self): + """Return the API endpoint used by the client instance. + + Returns: + str: The API endpoint used by the client instance. + """ + return self._client._api_endpoint + + @property + def universe_domain(self) -> str: + """Return the universe domain used by the client instance. 
+ + Returns: + str: The universe domain used + by the client instance. + """ + return self._client._universe_domain + + get_transport_class = BusinessGlossaryServiceClient.get_transport_class + + def __init__( + self, + *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[ + Union[ + str, + BusinessGlossaryServiceTransport, + Callable[..., BusinessGlossaryServiceTransport], + ] + ] = "grpc_asyncio", + client_options: Optional[ClientOptions] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the business glossary service async client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Optional[Union[str,BusinessGlossaryServiceTransport,Callable[..., BusinessGlossaryServiceTransport]]]): + The transport to use, or a Callable that constructs and returns a new transport to use. + If a Callable is given, it will be called with the same set of initialization + arguments as used in the BusinessGlossaryServiceTransport constructor. + If set to None, a transport is chosen automatically. + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): + Custom options for the client. + + 1. The ``api_endpoint`` property can be used to override the + default endpoint provided by the client when ``transport`` is + not explicitly provided. 
Only if this property is not set and + ``transport`` was not explicitly provided, the endpoint is + determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment + variable, which have one of the following values: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto-switch to the + default mTLS endpoint if client certificate is present; this is + the default value). + + 2. If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide a client certificate for mTLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + + 3. The ``universe_domain`` property can be used to override the + default "googleapis.com" universe. Note that ``api_endpoint`` + property still takes precedence; and ``universe_domain`` is + currently not supported for mTLS. + + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. 
+ """ + self._client = BusinessGlossaryServiceClient( + credentials=credentials, + transport=transport, + client_options=client_options, + client_info=client_info, + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + std_logging.DEBUG + ): # pragma: NO COVER + _LOGGER.debug( + "Created client `google.cloud.dataplex_v1.BusinessGlossaryServiceAsyncClient`.", + extra={ + "serviceName": "google.cloud.dataplex.v1.BusinessGlossaryService", + "universeDomain": getattr( + self._client._transport._credentials, "universe_domain", "" + ), + "credentialsType": f"{type(self._client._transport._credentials).__module__}.{type(self._client._transport._credentials).__qualname__}", + "credentialsInfo": getattr( + self.transport._credentials, "get_cred_info", lambda: None + )(), + } + if hasattr(self._client._transport, "_credentials") + else { + "serviceName": "google.cloud.dataplex.v1.BusinessGlossaryService", + "credentialsType": None, + }, + ) + + async def create_glossary( + self, + request: Optional[Union[business_glossary.CreateGlossaryRequest, dict]] = None, + *, + parent: Optional[str] = None, + glossary: Optional[business_glossary.Glossary] = None, + glossary_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation_async.AsyncOperation: + r"""Creates a new Glossary resource. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + + async def sample_create_glossary(): + # Create a client + client = dataplex_v1.BusinessGlossaryServiceAsyncClient() + + # Initialize request argument(s) + request = dataplex_v1.CreateGlossaryRequest( + parent="parent_value", + glossary_id="glossary_id_value", + ) + + # Make the request + operation = client.create_glossary(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.dataplex_v1.types.CreateGlossaryRequest, dict]]): + The request object. Create Glossary Request + parent (:class:`str`): + Required. The parent resource where this Glossary will + be created. Format: + projects/{project_id_or_number}/locations/{location_id} + where ``location_id`` refers to a Google Cloud region. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + glossary (:class:`google.cloud.dataplex_v1.types.Glossary`): + Required. The Glossary to create. + This corresponds to the ``glossary`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + glossary_id (:class:`str`): + Required. Glossary ID: Glossary + identifier. + + This corresponds to the ``glossary_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. 
Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.dataplex_v1.types.Glossary` A Glossary represents a collection of GlossaryCategories and GlossaryTerms + defined by the user. Glossary is a top level resource + and is the Google Cloud parent resource of all the + GlossaryCategories and GlossaryTerms within it. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [parent, glossary, glossary_id] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, business_glossary.CreateGlossaryRequest): + request = business_glossary.CreateGlossaryRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if glossary is not None: + request.glossary = glossary + if glossary_id is not None: + request.glossary_id = glossary_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.create_glossary + ] + + # Certain fields should be provided within the metadata header; + # add these here. 
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + business_glossary.Glossary, + metadata_type=service.OperationMetadata, + ) + + # Done; return the response. + return response + + async def update_glossary( + self, + request: Optional[Union[business_glossary.UpdateGlossaryRequest, dict]] = None, + *, + glossary: Optional[business_glossary.Glossary] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation_async.AsyncOperation: + r"""Updates a Glossary resource. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + + async def sample_update_glossary(): + # Create a client + client = dataplex_v1.BusinessGlossaryServiceAsyncClient() + + # Initialize request argument(s) + request = dataplex_v1.UpdateGlossaryRequest( + ) + + # Make the request + operation = client.update_glossary(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.dataplex_v1.types.UpdateGlossaryRequest, dict]]): + The request object. Update Glossary Request + glossary (:class:`google.cloud.dataplex_v1.types.Glossary`): + Required. The Glossary to update. The Glossary's + ``name`` field is used to identify the Glossary to + update. Format: + projects/{project_id_or_number}/locations/{location_id}/glossaries/{glossary_id} + + This corresponds to the ``glossary`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): + Required. The list of fields to + update. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. 
+ + The result type for the operation will be :class:`google.cloud.dataplex_v1.types.Glossary` A Glossary represents a collection of GlossaryCategories and GlossaryTerms + defined by the user. Glossary is a top level resource + and is the Google Cloud parent resource of all the + GlossaryCategories and GlossaryTerms within it. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [glossary, update_mask] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, business_glossary.UpdateGlossaryRequest): + request = business_glossary.UpdateGlossaryRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if glossary is not None: + request.glossary = glossary + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.update_glossary + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("glossary.name", request.glossary.name),) + ), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. 
+ response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + business_glossary.Glossary, + metadata_type=service.OperationMetadata, + ) + + # Done; return the response. + return response + + async def delete_glossary( + self, + request: Optional[Union[business_glossary.DeleteGlossaryRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation_async.AsyncOperation: + r"""Deletes a Glossary resource. All the categories and + terms within the Glossary must be deleted before the + Glossary can be deleted. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + + async def sample_delete_glossary(): + # Create a client + client = dataplex_v1.BusinessGlossaryServiceAsyncClient() + + # Initialize request argument(s) + request = dataplex_v1.DeleteGlossaryRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_glossary(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.dataplex_v1.types.DeleteGlossaryRequest, dict]]): + The request object. Delete Glossary Request + name (:class:`str`): + Required. The name of the Glossary to delete. 
Format: + projects/{project_id_or_number}/locations/{location_id}/glossaries/{glossary_id} + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated + empty messages in your APIs. A typical example is to + use it as the request or the response type of an API + method. For instance: + + service Foo { + rpc Bar(google.protobuf.Empty) returns + (google.protobuf.Empty); + + } + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
+ if not isinstance(request, business_glossary.DeleteGlossaryRequest): + request = business_glossary.DeleteGlossaryRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.delete_glossary + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + empty_pb2.Empty, + metadata_type=service.OperationMetadata, + ) + + # Done; return the response. + return response + + async def get_glossary( + self, + request: Optional[Union[business_glossary.GetGlossaryRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> business_glossary.Glossary: + r"""Gets a Glossary resource. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + + async def sample_get_glossary(): + # Create a client + client = dataplex_v1.BusinessGlossaryServiceAsyncClient() + + # Initialize request argument(s) + request = dataplex_v1.GetGlossaryRequest( + name="name_value", + ) + + # Make the request + response = await client.get_glossary(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.dataplex_v1.types.GetGlossaryRequest, dict]]): + The request object. Get Glossary Request + name (:class:`str`): + Required. The name of the Glossary to retrieve. Format: + projects/{project_id_or_number}/locations/{location_id}/glossaries/{glossary_id} + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.dataplex_v1.types.Glossary: + A Glossary represents a collection of + GlossaryCategories and GlossaryTerms + defined by the user. Glossary is a top + level resource and is the Google Cloud + parent resource of all the + GlossaryCategories and GlossaryTerms + within it. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, business_glossary.GetGlossaryRequest): + request = business_glossary.GetGlossaryRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.get_glossary + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def list_glossaries( + self, + request: Optional[Union[business_glossary.ListGlossariesRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pagers.ListGlossariesAsyncPager: + r"""Lists Glossary resources in a project and location. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. 
+ # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + + async def sample_list_glossaries(): + # Create a client + client = dataplex_v1.BusinessGlossaryServiceAsyncClient() + + # Initialize request argument(s) + request = dataplex_v1.ListGlossariesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_glossaries(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.cloud.dataplex_v1.types.ListGlossariesRequest, dict]]): + The request object. List Glossaries Request + parent (:class:`str`): + Required. The parent, which has this collection of + Glossaries. Format: + projects/{project_id_or_number}/locations/{location_id} + where ``location_id`` refers to a Google Cloud region. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.dataplex_v1.services.business_glossary_service.pagers.ListGlossariesAsyncPager: + List Glossaries Response + + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. 
+ # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [parent] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, business_glossary.ListGlossariesRequest): + request = business_glossary.ListGlossariesRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.list_glossaries + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListGlossariesAsyncPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + async def create_glossary_category( + self, + request: Optional[ + Union[business_glossary.CreateGlossaryCategoryRequest, dict] + ] = None, + *, + parent: Optional[str] = None, + category: Optional[business_glossary.GlossaryCategory] = None, + category_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> business_glossary.GlossaryCategory: + r"""Creates a new GlossaryCategory resource. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + + async def sample_create_glossary_category(): + # Create a client + client = dataplex_v1.BusinessGlossaryServiceAsyncClient() + + # Initialize request argument(s) + category = dataplex_v1.GlossaryCategory() + category.parent = "parent_value" + + request = dataplex_v1.CreateGlossaryCategoryRequest( + parent="parent_value", + category_id="category_id_value", + category=category, + ) + + # Make the request + response = await client.create_glossary_category(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.dataplex_v1.types.CreateGlossaryCategoryRequest, dict]]): + The request object. Creates a new GlossaryCategory under + the specified Glossary. + parent (:class:`str`): + Required. The parent resource where this + GlossaryCategory will be created. Format: + projects/{project_id_or_number}/locations/{location_id}/glossaries/{glossary_id} + where ``locationId`` refers to a Google Cloud region. 
+ + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + category (:class:`google.cloud.dataplex_v1.types.GlossaryCategory`): + Required. The GlossaryCategory to + create. + + This corresponds to the ``category`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + category_id (:class:`str`): + Required. GlossaryCategory + identifier. + + This corresponds to the ``category_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.dataplex_v1.types.GlossaryCategory: + A GlossaryCategory represents a + collection of GlossaryCategories and + GlossaryTerms within a Glossary that are + related to each other. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [parent, category, category_id] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
+ if not isinstance(request, business_glossary.CreateGlossaryCategoryRequest): + request = business_glossary.CreateGlossaryCategoryRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if category is not None: + request.category = category + if category_id is not None: + request.category_id = category_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.create_glossary_category + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def update_glossary_category( + self, + request: Optional[ + Union[business_glossary.UpdateGlossaryCategoryRequest, dict] + ] = None, + *, + category: Optional[business_glossary.GlossaryCategory] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> business_glossary.GlossaryCategory: + r"""Updates a GlossaryCategory resource. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + + async def sample_update_glossary_category(): + # Create a client + client = dataplex_v1.BusinessGlossaryServiceAsyncClient() + + # Initialize request argument(s) + category = dataplex_v1.GlossaryCategory() + category.parent = "parent_value" + + request = dataplex_v1.UpdateGlossaryCategoryRequest( + category=category, + ) + + # Make the request + response = await client.update_glossary_category(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.dataplex_v1.types.UpdateGlossaryCategoryRequest, dict]]): + The request object. Update GlossaryCategory Request + category (:class:`google.cloud.dataplex_v1.types.GlossaryCategory`): + Required. The GlossaryCategory to update. The + GlossaryCategory's ``name`` field is used to identify + the GlossaryCategory to update. Format: + projects/{project_id_or_number}/locations/{location_id}/glossaries/{glossary_id}/categories/{category_id} + + This corresponds to the ``category`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): + Required. The list of fields to + update. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
+ + Returns: + google.cloud.dataplex_v1.types.GlossaryCategory: + A GlossaryCategory represents a + collection of GlossaryCategories and + GlossaryTerms within a Glossary that are + related to each other. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [category, update_mask] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, business_glossary.UpdateGlossaryCategoryRequest): + request = business_glossary.UpdateGlossaryCategoryRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if category is not None: + request.category = category + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.update_glossary_category + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("category.name", request.category.name),) + ), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + async def delete_glossary_category( + self, + request: Optional[ + Union[business_glossary.DeleteGlossaryCategoryRequest, dict] + ] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> None: + r"""Deletes a GlossaryCategory resource. All the + GlossaryCategories and GlossaryTerms nested directly + under the specified GlossaryCategory will be moved one + level up to the parent in the hierarchy. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + + async def sample_delete_glossary_category(): + # Create a client + client = dataplex_v1.BusinessGlossaryServiceAsyncClient() + + # Initialize request argument(s) + request = dataplex_v1.DeleteGlossaryCategoryRequest( + name="name_value", + ) + + # Make the request + await client.delete_glossary_category(request=request) + + Args: + request (Optional[Union[google.cloud.dataplex_v1.types.DeleteGlossaryCategoryRequest, dict]]): + The request object. Delete GlossaryCategory Request + name (:class:`str`): + Required. The name of the GlossaryCategory to delete. + Format: + projects/{project_id_or_number}/locations/{location_id}/glossaries/{glossary_id}/categories/{category_id} + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. 
+ timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, business_glossary.DeleteGlossaryCategoryRequest): + request = business_glossary.DeleteGlossaryCategoryRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.delete_glossary_category + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. 
+ await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + async def get_glossary_category( + self, + request: Optional[ + Union[business_glossary.GetGlossaryCategoryRequest, dict] + ] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> business_glossary.GlossaryCategory: + r"""Gets a GlossaryCategory resource. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + + async def sample_get_glossary_category(): + # Create a client + client = dataplex_v1.BusinessGlossaryServiceAsyncClient() + + # Initialize request argument(s) + request = dataplex_v1.GetGlossaryCategoryRequest( + name="name_value", + ) + + # Make the request + response = await client.get_glossary_category(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.dataplex_v1.types.GetGlossaryCategoryRequest, dict]]): + The request object. Get GlossaryCategory Request + name (:class:`str`): + Required. The name of the GlossaryCategory to retrieve. + Format: + projects/{project_id_or_number}/locations/{location_id}/glossaries/{glossary_id}/categories/{category_id} + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.dataplex_v1.types.GlossaryCategory: + A GlossaryCategory represents a + collection of GlossaryCategories and + GlossaryTerms within a Glossary that are + related to each other. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, business_glossary.GetGlossaryCategoryRequest): + request = business_glossary.GetGlossaryCategoryRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.get_glossary_category + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. 
+ response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def list_glossary_categories( + self, + request: Optional[ + Union[business_glossary.ListGlossaryCategoriesRequest, dict] + ] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pagers.ListGlossaryCategoriesAsyncPager: + r"""Lists GlossaryCategory resources in a Glossary. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + + async def sample_list_glossary_categories(): + # Create a client + client = dataplex_v1.BusinessGlossaryServiceAsyncClient() + + # Initialize request argument(s) + request = dataplex_v1.ListGlossaryCategoriesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_glossary_categories(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.cloud.dataplex_v1.types.ListGlossaryCategoriesRequest, dict]]): + The request object. List GlossaryCategories Request + parent (:class:`str`): + Required. The parent, which has this collection of + GlossaryCategories. Format: + projects/{project_id_or_number}/locations/{location_id}/glossaries/{glossary_id} + Location is the Google Cloud region. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
            retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any,
                should be retried.
            timeout (float): The timeout for this request.
            metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
                sent along with the request as metadata. Normally, each value must be of type `str`,
                but for metadata keys ending with the suffix `-bin`, the corresponding values must
                be of type `bytes`.

        Returns:
            google.cloud.dataplex_v1.services.business_glossary_service.pagers.ListGlossaryCategoriesAsyncPager:
                List GlossaryCategories Response

                Iterating over this object will yield
                results and resolve additional pages
                automatically.

        """
        # Create or coerce a protobuf request object.
        # - Quick check: If we got a request object, we should *not* have
        #   gotten any keyword arguments that map to the request.
        flattened_params = [parent]
        has_flattened_params = (
            len([param for param in flattened_params if param is not None]) > 0
        )
        if request is not None and has_flattened_params:
            raise ValueError(
                "If the `request` argument is set, then none of "
                "the individual field arguments should be set."
            )

        # - Use the request object if provided (the check above guarantees the
        #   flattened arguments are unset in that case, so the caller's request
        #   is never mutated), or create one from the flattened arguments.
        if not isinstance(request, business_glossary.ListGlossaryCategoriesRequest):
            request = business_glossary.ListGlossaryCategoriesRequest(request)

        # If we have keyword arguments corresponding to fields on the
        # request, apply these.
        if parent is not None:
            request.parent = parent

        # Wrap the RPC method; this adds retry and timeout information,
        # and friendly error handling.
        rpc = self._client._transport._wrapped_methods[
            self._client._transport.list_glossary_categories
        ]

        # Certain fields should be provided within the metadata header;
        # add these here (the request ``parent`` becomes the routing header).
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListGlossaryCategoriesAsyncPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def create_glossary_term( + self, + request: Optional[ + Union[business_glossary.CreateGlossaryTermRequest, dict] + ] = None, + *, + parent: Optional[str] = None, + term: Optional[business_glossary.GlossaryTerm] = None, + term_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> business_glossary.GlossaryTerm: + r"""Creates a new GlossaryTerm resource. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + + async def sample_create_glossary_term(): + # Create a client + client = dataplex_v1.BusinessGlossaryServiceAsyncClient() + + # Initialize request argument(s) + term = dataplex_v1.GlossaryTerm() + term.parent = "parent_value" + + request = dataplex_v1.CreateGlossaryTermRequest( + parent="parent_value", + term_id="term_id_value", + term=term, + ) + + # Make the request + response = await client.create_glossary_term(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.dataplex_v1.types.CreateGlossaryTermRequest, dict]]): + The request object. Creates a new GlossaryTerm under the + specified Glossary. + parent (:class:`str`): + Required. The parent resource where the GlossaryTerm + will be created. Format: + projects/{project_id_or_number}/locations/{location_id}/glossaries/{glossary_id} + where ``location_id`` refers to a Google Cloud region. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + term (:class:`google.cloud.dataplex_v1.types.GlossaryTerm`): + Required. The GlossaryTerm to create. + This corresponds to the ``term`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + term_id (:class:`str`): + Required. GlossaryTerm identifier. + This corresponds to the ``term_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. 
Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.dataplex_v1.types.GlossaryTerm: + GlossaryTerms are the core of + Glossary. A GlossaryTerm holds a rich + text description that can be attached to + Entries or specific columns to enrich + them. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [parent, term, term_id] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, business_glossary.CreateGlossaryTermRequest): + request = business_glossary.CreateGlossaryTermRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if term is not None: + request.term = term + if term_id is not None: + request.term_id = term_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.create_glossary_term + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. 
+ response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def update_glossary_term( + self, + request: Optional[ + Union[business_glossary.UpdateGlossaryTermRequest, dict] + ] = None, + *, + term: Optional[business_glossary.GlossaryTerm] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> business_glossary.GlossaryTerm: + r"""Updates a GlossaryTerm resource. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + + async def sample_update_glossary_term(): + # Create a client + client = dataplex_v1.BusinessGlossaryServiceAsyncClient() + + # Initialize request argument(s) + term = dataplex_v1.GlossaryTerm() + term.parent = "parent_value" + + request = dataplex_v1.UpdateGlossaryTermRequest( + term=term, + ) + + # Make the request + response = await client.update_glossary_term(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.dataplex_v1.types.UpdateGlossaryTermRequest, dict]]): + The request object. Update GlossaryTerm Request + term (:class:`google.cloud.dataplex_v1.types.GlossaryTerm`): + Required. The GlossaryTerm to update. The GlossaryTerm's + ``name`` field is used to identify the GlossaryTerm to + update. 
Format: + projects/{project_id_or_number}/locations/{location_id}/glossaries/{glossary_id}/terms/{term_id} + + This corresponds to the ``term`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): + Required. The list of fields to + update. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.dataplex_v1.types.GlossaryTerm: + GlossaryTerms are the core of + Glossary. A GlossaryTerm holds a rich + text description that can be attached to + Entries or specific columns to enrich + them. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [term, update_mask] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
        if not isinstance(request, business_glossary.UpdateGlossaryTermRequest):
            request = business_glossary.UpdateGlossaryTermRequest(request)

        # If we have keyword arguments corresponding to fields on the
        # request, apply these.
        if term is not None:
            request.term = term
        if update_mask is not None:
            request.update_mask = update_mask

        # Wrap the RPC method; this adds retry and timeout information,
        # and friendly error handling.
        rpc = self._client._transport._wrapped_methods[
            self._client._transport.update_glossary_term
        ]

        # Certain fields should be provided within the metadata header;
        # add these here. The routing header is derived from the resource
        # ``name`` nested inside the ``term`` message, not a top-level field.
        metadata = tuple(metadata) + (
            gapic_v1.routing_header.to_grpc_metadata(
                (("term.name", request.term.name),)
            ),
        )

        # Validate the universe domain.
        self._client._validate_universe_domain()

        # Send the request.
        response = await rpc(
            request,
            retry=retry,
            timeout=timeout,
            metadata=metadata,
        )

        # Done; return the response.
        return response

    async def delete_glossary_term(
        self,
        request: Optional[
            Union[business_glossary.DeleteGlossaryTermRequest, dict]
        ] = None,
        *,
        name: Optional[str] = None,
        retry: OptionalRetry = gapic_v1.method.DEFAULT,
        timeout: Union[float, object] = gapic_v1.method.DEFAULT,
        metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
    ) -> None:
        r"""Deletes a GlossaryTerm resource.

        .. code-block:: python

            # This snippet has been automatically generated and should be regarded as a
            # code template only.
            # It will require modifications to work:
            # - It may require correct/in-range values for request initialization.
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + + async def sample_delete_glossary_term(): + # Create a client + client = dataplex_v1.BusinessGlossaryServiceAsyncClient() + + # Initialize request argument(s) + request = dataplex_v1.DeleteGlossaryTermRequest( + name="name_value", + ) + + # Make the request + await client.delete_glossary_term(request=request) + + Args: + request (Optional[Union[google.cloud.dataplex_v1.types.DeleteGlossaryTermRequest, dict]]): + The request object. Delete GlossaryTerm Request + name (:class:`str`): + Required. The name of the GlossaryTerm to delete. + Format: + projects/{project_id_or_number}/locations/{location_id}/glossaries/{glossary_id}/terms/{term_id} + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." 
            )

        # - Use the request object if provided (the check above guarantees the
        #   flattened arguments are unset in that case, so the caller's request
        #   is never mutated), or create one from the flattened arguments.
        if not isinstance(request, business_glossary.DeleteGlossaryTermRequest):
            request = business_glossary.DeleteGlossaryTermRequest(request)

        # If we have keyword arguments corresponding to fields on the
        # request, apply these.
        if name is not None:
            request.name = name

        # Wrap the RPC method; this adds retry and timeout information,
        # and friendly error handling.
        rpc = self._client._transport._wrapped_methods[
            self._client._transport.delete_glossary_term
        ]

        # Certain fields should be provided within the metadata header;
        # add these here (the request ``name`` becomes the routing header).
        metadata = tuple(metadata) + (
            gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),
        )

        # Validate the universe domain.
        self._client._validate_universe_domain()

        # Send the request. Delete returns Empty, so there is no response to
        # hand back to the caller.
        await rpc(
            request,
            retry=retry,
            timeout=timeout,
            metadata=metadata,
        )

    async def get_glossary_term(
        self,
        request: Optional[Union[business_glossary.GetGlossaryTermRequest, dict]] = None,
        *,
        name: Optional[str] = None,
        retry: OptionalRetry = gapic_v1.method.DEFAULT,
        timeout: Union[float, object] = gapic_v1.method.DEFAULT,
        metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
    ) -> business_glossary.GlossaryTerm:
        r"""Gets a GlossaryTerm resource.

        .. code-block:: python

            # This snippet has been automatically generated and should be regarded as a
            # code template only.
            # It will require modifications to work:
            # - It may require correct/in-range values for request initialization.
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + + async def sample_get_glossary_term(): + # Create a client + client = dataplex_v1.BusinessGlossaryServiceAsyncClient() + + # Initialize request argument(s) + request = dataplex_v1.GetGlossaryTermRequest( + name="name_value", + ) + + # Make the request + response = await client.get_glossary_term(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.dataplex_v1.types.GetGlossaryTermRequest, dict]]): + The request object. Get GlossaryTerm Request + name (:class:`str`): + Required. The name of the GlossaryTerm to retrieve. + Format: + projects/{project_id_or_number}/locations/{location_id}/glossaries/{glossary_id}/terms/{term_id} + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.dataplex_v1.types.GlossaryTerm: + GlossaryTerms are the core of + Glossary. A GlossaryTerm holds a rich + text description that can be attached to + Entries or specific columns to enrich + them. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, business_glossary.GetGlossaryTermRequest): + request = business_glossary.GetGlossaryTermRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.get_glossary_term + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def list_glossary_terms( + self, + request: Optional[ + Union[business_glossary.ListGlossaryTermsRequest, dict] + ] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pagers.ListGlossaryTermsAsyncPager: + r"""Lists GlossaryTerm resources in a Glossary. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. 
+ # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + + async def sample_list_glossary_terms(): + # Create a client + client = dataplex_v1.BusinessGlossaryServiceAsyncClient() + + # Initialize request argument(s) + request = dataplex_v1.ListGlossaryTermsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_glossary_terms(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.cloud.dataplex_v1.types.ListGlossaryTermsRequest, dict]]): + The request object. List GlossaryTerms Request + parent (:class:`str`): + Required. The parent, which has this collection of + GlossaryTerms. Format: + projects/{project_id_or_number}/locations/{location_id}/glossaries/{glossary_id} + where ``location_id`` refers to a Google Cloud region. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.dataplex_v1.services.business_glossary_service.pagers.ListGlossaryTermsAsyncPager: + List GlossaryTerms Response + + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. 
+ # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [parent] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, business_glossary.ListGlossaryTermsRequest): + request = business_glossary.ListGlossaryTermsRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.list_glossary_terms + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListGlossaryTermsAsyncPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
        return response

    async def list_operations(
        self,
        request: Optional[operations_pb2.ListOperationsRequest] = None,
        *,
        retry: OptionalRetry = gapic_v1.method.DEFAULT,
        timeout: Union[float, object] = gapic_v1.method.DEFAULT,
        metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
    ) -> operations_pb2.ListOperationsResponse:
        r"""Lists operations that match the specified filter in the request.

        Args:
            request (:class:`~.operations_pb2.ListOperationsRequest`):
                The request object. Request message for
                `ListOperations` method.
            retry (google.api_core.retry_async.AsyncRetry): Designation of what errors,
                if any, should be retried.
            timeout (float): The timeout for this request.
            metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
                sent along with the request as metadata. Normally, each value must be of type `str`,
                but for metadata keys ending with the suffix `-bin`, the corresponding values must
                be of type `bytes`.
        Returns:
            ~.operations_pb2.ListOperationsResponse:
                Response message for ``ListOperations`` method.
        """
        # Create or coerce a protobuf request object.
        # The request isn't a proto-plus wrapped type,
        # so it must be constructed via keyword expansion.
        if isinstance(request, dict):
            request = operations_pb2.ListOperationsRequest(**request)

        # Wrap the RPC method; this adds retry and timeout information,
        # and friendly error handling.
        rpc = self.transport._wrapped_methods[self._client._transport.list_operations]

        # Certain fields should be provided within the metadata header;
        # add these here (the request ``name`` becomes the routing header).
        metadata = tuple(metadata) + (
            gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),
        )

        # Validate the universe domain.
        self._client._validate_universe_domain()

        # Send the request.
        response = await rpc(
            request,
            retry=retry,
            timeout=timeout,
            metadata=metadata,
        )

        # Done; return the response.
+ return response + + async def get_operation( + self, + request: Optional[operations_pb2.GetOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operations_pb2.Operation: + r"""Gets the latest state of a long-running operation. + + Args: + request (:class:`~.operations_pb2.GetOperationRequest`): + The request object. Request message for + `GetOperation` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + Returns: + ~.operations_pb2.Operation: + An ``Operation`` object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.GetOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self.transport._wrapped_methods[self._client._transport.get_operation] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + async def delete_operation( + self, + request: Optional[operations_pb2.DeleteOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> None: + r"""Deletes a long-running operation. + + This method indicates that the client is no longer interested + in the operation result. It does not cancel the operation. + If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.DeleteOperationRequest`): + The request object. Request message for + `DeleteOperation` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.DeleteOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self.transport._wrapped_methods[self._client._transport.delete_operation] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. 
+ await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + async def cancel_operation( + self, + request: Optional[operations_pb2.CancelOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> None: + r"""Starts asynchronous cancellation on a long-running operation. + + The server makes a best effort to cancel the operation, but success + is not guaranteed. If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.CancelOperationRequest`): + The request object. Request message for + `CancelOperation` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.CancelOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self.transport._wrapped_methods[self._client._transport.cancel_operation] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. 
+ await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + async def get_location( + self, + request: Optional[locations_pb2.GetLocationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> locations_pb2.Location: + r"""Gets information about a location. + + Args: + request (:class:`~.location_pb2.GetLocationRequest`): + The request object. Request message for + `GetLocation` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + Returns: + ~.location_pb2.Location: + Location object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = locations_pb2.GetLocationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self.transport._wrapped_methods[self._client._transport.get_location] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + async def list_locations( + self, + request: Optional[locations_pb2.ListLocationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> locations_pb2.ListLocationsResponse: + r"""Lists information about the supported locations for this service. + + Args: + request (:class:`~.location_pb2.ListLocationsRequest`): + The request object. Request message for + `ListLocations` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + Returns: + ~.location_pb2.ListLocationsResponse: + Response message for ``ListLocations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = locations_pb2.ListLocationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self.transport._wrapped_methods[self._client._transport.list_locations] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + async def __aenter__(self) -> "BusinessGlossaryServiceAsyncClient": + return self + + async def __aexit__(self, exc_type, exc, tb): + await self.transport.close() + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + +if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER + DEFAULT_CLIENT_INFO.protobuf_runtime_version = google.protobuf.__version__ + + +__all__ = ("BusinessGlossaryServiceAsyncClient",) diff --git a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/business_glossary_service/client.py b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/business_glossary_service/client.py new file mode 100644 index 000000000000..3900368df428 --- /dev/null +++ b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/business_glossary_service/client.py @@ -0,0 +1,3008 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from collections import OrderedDict +from http import HTTPStatus +import json +import logging as std_logging +import os +import re +from typing import ( + Callable, + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, + cast, +) +import warnings + +from google.api_core import client_options as client_options_lib +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.exceptions import MutualTLSChannelError # type: ignore +from google.auth.transport import mtls # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.oauth2 import service_account # type: ignore +import google.protobuf + +from google.cloud.dataplex_v1 import gapic_version as package_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object, None] # type: ignore + +try: + from google.api_core import client_logging # type: ignore + + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER +except ImportError: # pragma: NO COVER + CLIENT_LOGGING_SUPPORTED = False + +_LOGGER = std_logging.getLogger(__name__) + +from google.api_core import operation # type: ignore +from google.api_core import operation_async # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore + +from google.cloud.dataplex_v1.services.business_glossary_service import pagers +from 
google.cloud.dataplex_v1.types import business_glossary, service + +from .transports.base import DEFAULT_CLIENT_INFO, BusinessGlossaryServiceTransport +from .transports.grpc import BusinessGlossaryServiceGrpcTransport +from .transports.grpc_asyncio import BusinessGlossaryServiceGrpcAsyncIOTransport +from .transports.rest import BusinessGlossaryServiceRestTransport + + +class BusinessGlossaryServiceClientMeta(type): + """Metaclass for the BusinessGlossaryService client. + + This provides class-level methods for building and retrieving + support objects (e.g. transport) without polluting the client instance + objects. + """ + + _transport_registry = ( + OrderedDict() + ) # type: Dict[str, Type[BusinessGlossaryServiceTransport]] + _transport_registry["grpc"] = BusinessGlossaryServiceGrpcTransport + _transport_registry["grpc_asyncio"] = BusinessGlossaryServiceGrpcAsyncIOTransport + _transport_registry["rest"] = BusinessGlossaryServiceRestTransport + + def get_transport_class( + cls, + label: Optional[str] = None, + ) -> Type[BusinessGlossaryServiceTransport]: + """Returns an appropriate transport class. + + Args: + label: The name of the desired transport. If none is + provided, then the first transport in the registry is used. + + Returns: + The transport class to use. + """ + # If a specific transport is requested, return that one. + if label: + return cls._transport_registry[label] + + # No transport is requested; return the default (that is, the first one + # in the dictionary). + return next(iter(cls._transport_registry.values())) + + +class BusinessGlossaryServiceClient(metaclass=BusinessGlossaryServiceClientMeta): + """BusinessGlossaryService provides APIs for managing business + glossary resources for enterprise customers. + The resources currently supported in Business Glossary are: + + 1. Glossary + 2. GlossaryCategory + 3. GlossaryTerm + """ + + @staticmethod + def _get_default_mtls_endpoint(api_endpoint): + """Converts api endpoint to mTLS endpoint. 
+ + Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to + "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively. + Args: + api_endpoint (Optional[str]): the api endpoint to convert. + Returns: + str: converted mTLS api endpoint. + """ + if not api_endpoint: + return api_endpoint + + mtls_endpoint_re = re.compile( + r"(?P[^.]+)(?P\.mtls)?(?P\.sandbox)?(?P\.googleapis\.com)?" + ) + + m = mtls_endpoint_re.match(api_endpoint) + name, mtls, sandbox, googledomain = m.groups() + if mtls or not googledomain: + return api_endpoint + + if sandbox: + return api_endpoint.replace( + "sandbox.googleapis.com", "mtls.sandbox.googleapis.com" + ) + + return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") + + # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead. + DEFAULT_ENDPOINT = "dataplex.googleapis.com" + DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore + DEFAULT_ENDPOINT + ) + + _DEFAULT_ENDPOINT_TEMPLATE = "dataplex.{UNIVERSE_DOMAIN}" + _DEFAULT_UNIVERSE = "googleapis.com" + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + BusinessGlossaryServiceClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_info(info) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. 
+ kwargs: Additional arguments to pass to the constructor. + + Returns: + BusinessGlossaryServiceClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_file(filename) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + from_service_account_json = from_service_account_file + + @property + def transport(self) -> BusinessGlossaryServiceTransport: + """Returns the transport used by the client instance. + + Returns: + BusinessGlossaryServiceTransport: The transport used by the client + instance. + """ + return self._transport + + @staticmethod + def glossary_path( + project: str, + location: str, + glossary: str, + ) -> str: + """Returns a fully-qualified glossary string.""" + return "projects/{project}/locations/{location}/glossaries/{glossary}".format( + project=project, + location=location, + glossary=glossary, + ) + + @staticmethod + def parse_glossary_path(path: str) -> Dict[str, str]: + """Parses a glossary path into its component segments.""" + m = re.match( + r"^projects/(?P.+?)/locations/(?P.+?)/glossaries/(?P.+?)$", + path, + ) + return m.groupdict() if m else {} + + @staticmethod + def glossary_category_path( + project: str, + location: str, + glossary: str, + glossary_category: str, + ) -> str: + """Returns a fully-qualified glossary_category string.""" + return "projects/{project}/locations/{location}/glossaries/{glossary}/categories/{glossary_category}".format( + project=project, + location=location, + glossary=glossary, + glossary_category=glossary_category, + ) + + @staticmethod + def parse_glossary_category_path(path: str) -> Dict[str, str]: + """Parses a glossary_category path into its component segments.""" + m = re.match( + r"^projects/(?P.+?)/locations/(?P.+?)/glossaries/(?P.+?)/categories/(?P.+?)$", + path, + ) + return m.groupdict() if m else {} + + @staticmethod + def glossary_term_path( + project: str, + location: str, + glossary: str, + glossary_term: str, + ) -> str: + 
"""Returns a fully-qualified glossary_term string.""" + return "projects/{project}/locations/{location}/glossaries/{glossary}/terms/{glossary_term}".format( + project=project, + location=location, + glossary=glossary, + glossary_term=glossary_term, + ) + + @staticmethod + def parse_glossary_term_path(path: str) -> Dict[str, str]: + """Parses a glossary_term path into its component segments.""" + m = re.match( + r"^projects/(?P.+?)/locations/(?P.+?)/glossaries/(?P.+?)/terms/(?P.+?)$", + path, + ) + return m.groupdict() if m else {} + + @staticmethod + def common_billing_account_path( + billing_account: str, + ) -> str: + """Returns a fully-qualified billing_account string.""" + return "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) + + @staticmethod + def parse_common_billing_account_path(path: str) -> Dict[str, str]: + """Parse a billing_account path into its component segments.""" + m = re.match(r"^billingAccounts/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_folder_path( + folder: str, + ) -> str: + """Returns a fully-qualified folder string.""" + return "folders/{folder}".format( + folder=folder, + ) + + @staticmethod + def parse_common_folder_path(path: str) -> Dict[str, str]: + """Parse a folder path into its component segments.""" + m = re.match(r"^folders/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_organization_path( + organization: str, + ) -> str: + """Returns a fully-qualified organization string.""" + return "organizations/{organization}".format( + organization=organization, + ) + + @staticmethod + def parse_common_organization_path(path: str) -> Dict[str, str]: + """Parse a organization path into its component segments.""" + m = re.match(r"^organizations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_project_path( + project: str, + ) -> str: + """Returns a fully-qualified project string.""" + return 
"projects/{project}".format( + project=project, + ) + + @staticmethod + def parse_common_project_path(path: str) -> Dict[str, str]: + """Parse a project path into its component segments.""" + m = re.match(r"^projects/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_location_path( + project: str, + location: str, + ) -> str: + """Returns a fully-qualified location string.""" + return "projects/{project}/locations/{location}".format( + project=project, + location=location, + ) + + @staticmethod + def parse_common_location_path(path: str) -> Dict[str, str]: + """Parse a location path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @classmethod + def get_mtls_endpoint_and_cert_source( + cls, client_options: Optional[client_options_lib.ClientOptions] = None + ): + """Deprecated. Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. + (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. 
Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. + """ + + warnings.warn( + "get_mtls_endpoint_and_cert_source is deprecated. Use the api_endpoint property instead.", + DeprecationWarning, + ) + if client_options is None: + client_options = client_options_lib.ClientOptions() + use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") + if use_client_cert not in ("true", "false"): + raise ValueError( + "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError( + "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + # Figure out the client cert source to use. + client_cert_source = None + if use_client_cert == "true": + if client_options.client_cert_source: + client_cert_source = client_options.client_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + + # Figure out which api endpoint to use. + if client_options.api_endpoint is not None: + api_endpoint = client_options.api_endpoint + elif use_mtls_endpoint == "always" or ( + use_mtls_endpoint == "auto" and client_cert_source + ): + api_endpoint = cls.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = cls.DEFAULT_ENDPOINT + + return api_endpoint, client_cert_source + + @staticmethod + def _read_environment_variables(): + """Returns the environment variables used by the client. + + Returns: + Tuple[bool, str, str]: returns the GOOGLE_API_USE_CLIENT_CERTIFICATE, + GOOGLE_API_USE_MTLS_ENDPOINT, and GOOGLE_CLOUD_UNIVERSE_DOMAIN environment variables. 
+ + Raises: + ValueError: If GOOGLE_API_USE_CLIENT_CERTIFICATE is not + any of ["true", "false"]. + google.auth.exceptions.MutualTLSChannelError: If GOOGLE_API_USE_MTLS_ENDPOINT + is not any of ["auto", "never", "always"]. + """ + use_client_cert = os.getenv( + "GOOGLE_API_USE_CLIENT_CERTIFICATE", "false" + ).lower() + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto").lower() + universe_domain_env = os.getenv("GOOGLE_CLOUD_UNIVERSE_DOMAIN") + if use_client_cert not in ("true", "false"): + raise ValueError( + "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError( + "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + return use_client_cert == "true", use_mtls_endpoint, universe_domain_env + + @staticmethod + def _get_client_cert_source(provided_cert_source, use_cert_flag): + """Return the client cert source to be used by the client. + + Args: + provided_cert_source (bytes): The client certificate source provided. + use_cert_flag (bool): A flag indicating whether to use the client certificate. + + Returns: + bytes or None: The client cert source to be used by the client. + """ + client_cert_source = None + if use_cert_flag: + if provided_cert_source: + client_cert_source = provided_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + return client_cert_source + + @staticmethod + def _get_api_endpoint( + api_override, client_cert_source, universe_domain, use_mtls_endpoint + ): + """Return the API endpoint used by the client. + + Args: + api_override (str): The API endpoint override. If specified, this is always + the return value of this function and the other arguments are not used. + client_cert_source (bytes): The client certificate source used by the client. 
+ universe_domain (str): The universe domain used by the client. + use_mtls_endpoint (str): How to use the mTLS endpoint, which depends also on the other parameters. + Possible values are "always", "auto", or "never". + + Returns: + str: The API endpoint to be used by the client. + """ + if api_override is not None: + api_endpoint = api_override + elif use_mtls_endpoint == "always" or ( + use_mtls_endpoint == "auto" and client_cert_source + ): + _default_universe = BusinessGlossaryServiceClient._DEFAULT_UNIVERSE + if universe_domain != _default_universe: + raise MutualTLSChannelError( + f"mTLS is not supported in any universe other than {_default_universe}." + ) + api_endpoint = BusinessGlossaryServiceClient.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = ( + BusinessGlossaryServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=universe_domain + ) + ) + return api_endpoint + + @staticmethod + def _get_universe_domain( + client_universe_domain: Optional[str], universe_domain_env: Optional[str] + ) -> str: + """Return the universe domain used by the client. + + Args: + client_universe_domain (Optional[str]): The universe domain configured via the client options. + universe_domain_env (Optional[str]): The universe domain configured via the "GOOGLE_CLOUD_UNIVERSE_DOMAIN" environment variable. + + Returns: + str: The universe domain to be used by the client. + + Raises: + ValueError: If the universe domain is an empty string. + """ + universe_domain = BusinessGlossaryServiceClient._DEFAULT_UNIVERSE + if client_universe_domain is not None: + universe_domain = client_universe_domain + elif universe_domain_env is not None: + universe_domain = universe_domain_env + if len(universe_domain.strip()) == 0: + raise ValueError("Universe Domain cannot be an empty string.") + return universe_domain + + def _validate_universe_domain(self): + """Validates client's and credentials' universe domains are consistent. 
+ + Returns: + bool: True iff the configured universe domain is valid. + + Raises: + ValueError: If the configured universe domain is not valid. + """ + + # NOTE (b/349488459): universe validation is disabled until further notice. + return True + + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. + """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + + @property + def api_endpoint(self): + """Return the API endpoint used by the client instance. + + Returns: + str: The API endpoint used by the client instance. + """ + return self._api_endpoint + + @property + def universe_domain(self) -> str: + """Return the universe domain used by the client instance. + + Returns: + str: The universe domain used by the client instance. + """ + return self._universe_domain + + def __init__( + self, + *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[ + Union[ + str, + BusinessGlossaryServiceTransport, + Callable[..., BusinessGlossaryServiceTransport], + ] + ] = None, + client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the business glossary service client. 
+ + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Optional[Union[str,BusinessGlossaryServiceTransport,Callable[..., BusinessGlossaryServiceTransport]]]): + The transport to use, or a Callable that constructs and returns a new transport. + If a Callable is given, it will be called with the same set of initialization + arguments as used in the BusinessGlossaryServiceTransport constructor. + If set to None, a transport is chosen automatically. + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): + Custom options for the client. + + 1. The ``api_endpoint`` property can be used to override the + default endpoint provided by the client when ``transport`` is + not explicitly provided. Only if this property is not set and + ``transport`` was not explicitly provided, the endpoint is + determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment + variable, which have one of the following values: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto-switch to the + default mTLS endpoint if client certificate is present; this is + the default value). + + 2. If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide a client certificate for mTLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + + 3. The ``universe_domain`` property can be used to override the + default "googleapis.com" universe. 
Note that the ``api_endpoint`` + property still takes precedence; and ``universe_domain`` is + currently not supported for mTLS. + + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + """ + self._client_options = client_options + if isinstance(self._client_options, dict): + self._client_options = client_options_lib.from_dict(self._client_options) + if self._client_options is None: + self._client_options = client_options_lib.ClientOptions() + self._client_options = cast( + client_options_lib.ClientOptions, self._client_options + ) + + universe_domain_opt = getattr(self._client_options, "universe_domain", None) + + ( + self._use_client_cert, + self._use_mtls_endpoint, + self._universe_domain_env, + ) = BusinessGlossaryServiceClient._read_environment_variables() + self._client_cert_source = ( + BusinessGlossaryServiceClient._get_client_cert_source( + self._client_options.client_cert_source, self._use_client_cert + ) + ) + self._universe_domain = BusinessGlossaryServiceClient._get_universe_domain( + universe_domain_opt, self._universe_domain_env + ) + self._api_endpoint = None # updated below, depending on `transport` + + # Initialize the universe domain validation. + self._is_universe_domain_valid = False + + if CLIENT_LOGGING_SUPPORTED: # pragma: NO COVER + # Setup logging. + client_logging.initialize_logging() + + api_key_value = getattr(self._client_options, "api_key", None) + if api_key_value and credentials: + raise ValueError( + "client_options.api_key and credentials are mutually exclusive" + ) + + # Save or instantiate the transport. 
+ # Ordinarily, we provide the transport, but allowing a custom transport + # instance provides an extensibility point for unusual situations. + transport_provided = isinstance(transport, BusinessGlossaryServiceTransport) + if transport_provided: + # transport is a BusinessGlossaryServiceTransport instance. + if credentials or self._client_options.credentials_file or api_key_value: + raise ValueError( + "When providing a transport instance, " + "provide its credentials directly." + ) + if self._client_options.scopes: + raise ValueError( + "When providing a transport instance, provide its scopes " + "directly." + ) + self._transport = cast(BusinessGlossaryServiceTransport, transport) + self._api_endpoint = self._transport.host + + self._api_endpoint = ( + self._api_endpoint + or BusinessGlossaryServiceClient._get_api_endpoint( + self._client_options.api_endpoint, + self._client_cert_source, + self._universe_domain, + self._use_mtls_endpoint, + ) + ) + + if not transport_provided: + import google.auth._default # type: ignore + + if api_key_value and hasattr( + google.auth._default, "get_api_key_credentials" + ): + credentials = google.auth._default.get_api_key_credentials( + api_key_value + ) + + transport_init: Union[ + Type[BusinessGlossaryServiceTransport], + Callable[..., BusinessGlossaryServiceTransport], + ] = ( + BusinessGlossaryServiceClient.get_transport_class(transport) + if isinstance(transport, str) or transport is None + else cast(Callable[..., BusinessGlossaryServiceTransport], transport) + ) + # initialize with the provided callable or the passed in class + self._transport = transport_init( + credentials=credentials, + credentials_file=self._client_options.credentials_file, + host=self._api_endpoint, + scopes=self._client_options.scopes, + client_cert_source_for_mtls=self._client_cert_source, + quota_project_id=self._client_options.quota_project_id, + client_info=client_info, + always_use_jwt_access=True, + 
api_audience=self._client_options.api_audience, + ) + + if "async" not in str(self._transport): + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + std_logging.DEBUG + ): # pragma: NO COVER + _LOGGER.debug( + "Created client `google.cloud.dataplex_v1.BusinessGlossaryServiceClient`.", + extra={ + "serviceName": "google.cloud.dataplex.v1.BusinessGlossaryService", + "universeDomain": getattr( + self._transport._credentials, "universe_domain", "" + ), + "credentialsType": f"{type(self._transport._credentials).__module__}.{type(self._transport._credentials).__qualname__}", + "credentialsInfo": getattr( + self.transport._credentials, "get_cred_info", lambda: None + )(), + } + if hasattr(self._transport, "_credentials") + else { + "serviceName": "google.cloud.dataplex.v1.BusinessGlossaryService", + "credentialsType": None, + }, + ) + + def create_glossary( + self, + request: Optional[Union[business_glossary.CreateGlossaryRequest, dict]] = None, + *, + parent: Optional[str] = None, + glossary: Optional[business_glossary.Glossary] = None, + glossary_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation.Operation: + r"""Creates a new Glossary resource. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + + def sample_create_glossary(): + # Create a client + client = dataplex_v1.BusinessGlossaryServiceClient() + + # Initialize request argument(s) + request = dataplex_v1.CreateGlossaryRequest( + parent="parent_value", + glossary_id="glossary_id_value", + ) + + # Make the request + operation = client.create_glossary(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.dataplex_v1.types.CreateGlossaryRequest, dict]): + The request object. Create Glossary Request + parent (str): + Required. The parent resource where this Glossary will + be created. Format: + projects/{project_id_or_number}/locations/{location_id} + where ``location_id`` refers to a Google Cloud region. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + glossary (google.cloud.dataplex_v1.types.Glossary): + Required. The Glossary to create. + This corresponds to the ``glossary`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + glossary_id (str): + Required. Glossary ID: Glossary + identifier. + + This corresponds to the ``glossary_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. 
Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.dataplex_v1.types.Glossary` A Glossary represents a collection of GlossaryCategories and GlossaryTerms + defined by the user. Glossary is a top level resource + and is the Google Cloud parent resource of all the + GlossaryCategories and GlossaryTerms within it. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [parent, glossary, glossary_id] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, business_glossary.CreateGlossaryRequest): + request = business_glossary.CreateGlossaryRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if glossary is not None: + request.glossary = glossary + if glossary_id is not None: + request.glossary_id = glossary_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.create_glossary] + + # Certain fields should be provided within the metadata header; + # add these here. 
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + business_glossary.Glossary, + metadata_type=service.OperationMetadata, + ) + + # Done; return the response. + return response + + def update_glossary( + self, + request: Optional[Union[business_glossary.UpdateGlossaryRequest, dict]] = None, + *, + glossary: Optional[business_glossary.Glossary] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation.Operation: + r"""Updates a Glossary resource. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + + def sample_update_glossary(): + # Create a client + client = dataplex_v1.BusinessGlossaryServiceClient() + + # Initialize request argument(s) + request = dataplex_v1.UpdateGlossaryRequest( + ) + + # Make the request + operation = client.update_glossary(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.dataplex_v1.types.UpdateGlossaryRequest, dict]): + The request object. 
Update Glossary Request + glossary (google.cloud.dataplex_v1.types.Glossary): + Required. The Glossary to update. The Glossary's + ``name`` field is used to identify the Glossary to + update. Format: + projects/{project_id_or_number}/locations/{location_id}/glossaries/{glossary_id} + + This corresponds to the ``glossary`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Required. The list of fields to + update. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.dataplex_v1.types.Glossary` A Glossary represents a collection of GlossaryCategories and GlossaryTerms + defined by the user. Glossary is a top level resource + and is the Google Cloud parent resource of all the + GlossaryCategories and GlossaryTerms within it. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ flattened_params = [glossary, update_mask] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, business_glossary.UpdateGlossaryRequest): + request = business_glossary.UpdateGlossaryRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if glossary is not None: + request.glossary = glossary + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.update_glossary] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("glossary.name", request.glossary.name),) + ), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + business_glossary.Glossary, + metadata_type=service.OperationMetadata, + ) + + # Done; return the response. 
+ return response + + def delete_glossary( + self, + request: Optional[Union[business_glossary.DeleteGlossaryRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation.Operation: + r"""Deletes a Glossary resource. All the categories and + terms within the Glossary must be deleted before the + Glossary can be deleted. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + + def sample_delete_glossary(): + # Create a client + client = dataplex_v1.BusinessGlossaryServiceClient() + + # Initialize request argument(s) + request = dataplex_v1.DeleteGlossaryRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_glossary(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.dataplex_v1.types.DeleteGlossaryRequest, dict]): + The request object. Delete Glossary Request + name (str): + Required. The name of the Glossary to delete. Format: + projects/{project_id_or_number}/locations/{location_id}/glossaries/{glossary_id} + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated + empty messages in your APIs. A typical example is to + use it as the request or the response type of an API + method. For instance: + + service Foo { + rpc Bar(google.protobuf.Empty) returns + (google.protobuf.Empty); + + } + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, business_glossary.DeleteGlossaryRequest): + request = business_glossary.DeleteGlossaryRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete_glossary] + + # Certain fields should be provided within the metadata header; + # add these here. 
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + empty_pb2.Empty, + metadata_type=service.OperationMetadata, + ) + + # Done; return the response. + return response + + def get_glossary( + self, + request: Optional[Union[business_glossary.GetGlossaryRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> business_glossary.Glossary: + r"""Gets a Glossary resource. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + + def sample_get_glossary(): + # Create a client + client = dataplex_v1.BusinessGlossaryServiceClient() + + # Initialize request argument(s) + request = dataplex_v1.GetGlossaryRequest( + name="name_value", + ) + + # Make the request + response = client.get_glossary(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.dataplex_v1.types.GetGlossaryRequest, dict]): + The request object. Get Glossary Request + name (str): + Required. The name of the Glossary to retrieve. 
Format: + projects/{project_id_or_number}/locations/{location_id}/glossaries/{glossary_id} + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.dataplex_v1.types.Glossary: + A Glossary represents a collection of + GlossaryCategories and GlossaryTerms + defined by the user. Glossary is a top + level resource and is the Google Cloud + parent resource of all the + GlossaryCategories and GlossaryTerms + within it. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, business_glossary.GetGlossaryRequest): + request = business_glossary.GetGlossaryRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = self._transport._wrapped_methods[self._transport.get_glossary] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def list_glossaries( + self, + request: Optional[Union[business_glossary.ListGlossariesRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pagers.ListGlossariesPager: + r"""Lists Glossary resources in a project and location. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + + def sample_list_glossaries(): + # Create a client + client = dataplex_v1.BusinessGlossaryServiceClient() + + # Initialize request argument(s) + request = dataplex_v1.ListGlossariesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_glossaries(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.dataplex_v1.types.ListGlossariesRequest, dict]): + The request object. List Glossaries Request + parent (str): + Required. The parent, which has this collection of + Glossaries. 
Format: + projects/{project_id_or_number}/locations/{location_id} + where ``location_id`` refers to a Google Cloud region. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.dataplex_v1.services.business_glossary_service.pagers.ListGlossariesPager: + List Glossaries Response + + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [parent] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, business_glossary.ListGlossariesRequest): + request = business_glossary.ListGlossariesRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = self._transport._wrapped_methods[self._transport.list_glossaries] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListGlossariesPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def create_glossary_category( + self, + request: Optional[ + Union[business_glossary.CreateGlossaryCategoryRequest, dict] + ] = None, + *, + parent: Optional[str] = None, + category: Optional[business_glossary.GlossaryCategory] = None, + category_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> business_glossary.GlossaryCategory: + r"""Creates a new GlossaryCategory resource. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + + def sample_create_glossary_category(): + # Create a client + client = dataplex_v1.BusinessGlossaryServiceClient() + + # Initialize request argument(s) + category = dataplex_v1.GlossaryCategory() + category.parent = "parent_value" + + request = dataplex_v1.CreateGlossaryCategoryRequest( + parent="parent_value", + category_id="category_id_value", + category=category, + ) + + # Make the request + response = client.create_glossary_category(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.dataplex_v1.types.CreateGlossaryCategoryRequest, dict]): + The request object. Creates a new GlossaryCategory under + the specified Glossary. + parent (str): + Required. The parent resource where this + GlossaryCategory will be created. Format: + projects/{project_id_or_number}/locations/{location_id}/glossaries/{glossary_id} + where ``locationId`` refers to a Google Cloud region. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + category (google.cloud.dataplex_v1.types.GlossaryCategory): + Required. The GlossaryCategory to + create. + + This corresponds to the ``category`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + category_id (str): + Required. GlossaryCategory + identifier. + + This corresponds to the ``category_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. 
Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.dataplex_v1.types.GlossaryCategory: + A GlossaryCategory represents a + collection of GlossaryCategories and + GlossaryTerms within a Glossary that are + related to each other. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [parent, category, category_id] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, business_glossary.CreateGlossaryCategoryRequest): + request = business_glossary.CreateGlossaryCategoryRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if category is not None: + request.category = category + if category_id is not None: + request.category_id = category_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.create_glossary_category] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. 
+ response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def update_glossary_category( + self, + request: Optional[ + Union[business_glossary.UpdateGlossaryCategoryRequest, dict] + ] = None, + *, + category: Optional[business_glossary.GlossaryCategory] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> business_glossary.GlossaryCategory: + r"""Updates a GlossaryCategory resource. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + + def sample_update_glossary_category(): + # Create a client + client = dataplex_v1.BusinessGlossaryServiceClient() + + # Initialize request argument(s) + category = dataplex_v1.GlossaryCategory() + category.parent = "parent_value" + + request = dataplex_v1.UpdateGlossaryCategoryRequest( + category=category, + ) + + # Make the request + response = client.update_glossary_category(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.dataplex_v1.types.UpdateGlossaryCategoryRequest, dict]): + The request object. Update GlossaryCategory Request + category (google.cloud.dataplex_v1.types.GlossaryCategory): + Required. The GlossaryCategory to update. The + GlossaryCategory's ``name`` field is used to identify + the GlossaryCategory to update. 
Format: + projects/{project_id_or_number}/locations/{location_id}/glossaries/{glossary_id}/categories/{category_id} + + This corresponds to the ``category`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Required. The list of fields to + update. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.dataplex_v1.types.GlossaryCategory: + A GlossaryCategory represents a + collection of GlossaryCategories and + GlossaryTerms within a Glossary that are + related to each other. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [category, update_mask] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, business_glossary.UpdateGlossaryCategoryRequest): + request = business_glossary.UpdateGlossaryCategoryRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
+ if category is not None: + request.category = category + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.update_glossary_category] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("category.name", request.category.name),) + ), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def delete_glossary_category( + self, + request: Optional[ + Union[business_glossary.DeleteGlossaryCategoryRequest, dict] + ] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> None: + r"""Deletes a GlossaryCategory resource. All the + GlossaryCategories and GlossaryTerms nested directly + under the specified GlossaryCategory will be moved one + level up to the parent in the hierarchy. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + + def sample_delete_glossary_category(): + # Create a client + client = dataplex_v1.BusinessGlossaryServiceClient() + + # Initialize request argument(s) + request = dataplex_v1.DeleteGlossaryCategoryRequest( + name="name_value", + ) + + # Make the request + client.delete_glossary_category(request=request) + + Args: + request (Union[google.cloud.dataplex_v1.types.DeleteGlossaryCategoryRequest, dict]): + The request object. Delete GlossaryCategory Request + name (str): + Required. The name of the GlossaryCategory to delete. + Format: + projects/{project_id_or_number}/locations/{location_id}/glossaries/{glossary_id}/categories/{category_id} + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." 
+ ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, business_glossary.DeleteGlossaryCategoryRequest): + request = business_glossary.DeleteGlossaryCategoryRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete_glossary_category] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + def get_glossary_category( + self, + request: Optional[ + Union[business_glossary.GetGlossaryCategoryRequest, dict] + ] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> business_glossary.GlossaryCategory: + r"""Gets a GlossaryCategory resource. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + + def sample_get_glossary_category(): + # Create a client + client = dataplex_v1.BusinessGlossaryServiceClient() + + # Initialize request argument(s) + request = dataplex_v1.GetGlossaryCategoryRequest( + name="name_value", + ) + + # Make the request + response = client.get_glossary_category(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.dataplex_v1.types.GetGlossaryCategoryRequest, dict]): + The request object. Get GlossaryCategory Request + name (str): + Required. The name of the GlossaryCategory to retrieve. + Format: + projects/{project_id_or_number}/locations/{location_id}/glossaries/{glossary_id}/categories/{category_id} + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.dataplex_v1.types.GlossaryCategory: + A GlossaryCategory represents a + collection of GlossaryCategories and + GlossaryTerms within a Glossary that are + related to each other. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, business_glossary.GetGlossaryCategoryRequest): + request = business_glossary.GetGlossaryCategoryRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_glossary_category] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def list_glossary_categories( + self, + request: Optional[ + Union[business_glossary.ListGlossaryCategoriesRequest, dict] + ] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pagers.ListGlossaryCategoriesPager: + r"""Lists GlossaryCategory resources in a Glossary. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. 
+ # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + + def sample_list_glossary_categories(): + # Create a client + client = dataplex_v1.BusinessGlossaryServiceClient() + + # Initialize request argument(s) + request = dataplex_v1.ListGlossaryCategoriesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_glossary_categories(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.dataplex_v1.types.ListGlossaryCategoriesRequest, dict]): + The request object. List GlossaryCategories Request + parent (str): + Required. The parent, which has this collection of + GlossaryCategories. Format: + projects/{project_id_or_number}/locations/{location_id}/glossaries/{glossary_id} + Location is the Google Cloud region. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.dataplex_v1.services.business_glossary_service.pagers.ListGlossaryCategoriesPager: + List GlossaryCategories Response + + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. 
+ # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [parent] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, business_glossary.ListGlossaryCategoriesRequest): + request = business_glossary.ListGlossaryCategoriesRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_glossary_categories] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListGlossaryCategoriesPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def create_glossary_term( + self, + request: Optional[ + Union[business_glossary.CreateGlossaryTermRequest, dict] + ] = None, + *, + parent: Optional[str] = None, + term: Optional[business_glossary.GlossaryTerm] = None, + term_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> business_glossary.GlossaryTerm: + r"""Creates a new GlossaryTerm resource. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + + def sample_create_glossary_term(): + # Create a client + client = dataplex_v1.BusinessGlossaryServiceClient() + + # Initialize request argument(s) + term = dataplex_v1.GlossaryTerm() + term.parent = "parent_value" + + request = dataplex_v1.CreateGlossaryTermRequest( + parent="parent_value", + term_id="term_id_value", + term=term, + ) + + # Make the request + response = client.create_glossary_term(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.dataplex_v1.types.CreateGlossaryTermRequest, dict]): + The request object. Creates a new GlossaryTerm under the + specified Glossary. + parent (str): + Required. The parent resource where the GlossaryTerm + will be created. Format: + projects/{project_id_or_number}/locations/{location_id}/glossaries/{glossary_id} + where ``location_id`` refers to a Google Cloud region. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ term (google.cloud.dataplex_v1.types.GlossaryTerm): + Required. The GlossaryTerm to create. + This corresponds to the ``term`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + term_id (str): + Required. GlossaryTerm identifier. + This corresponds to the ``term_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.dataplex_v1.types.GlossaryTerm: + GlossaryTerms are the core of + Glossary. A GlossaryTerm holds a rich + text description that can be attached to + Entries or specific columns to enrich + them. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [parent, term, term_id] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, business_glossary.CreateGlossaryTermRequest): + request = business_glossary.CreateGlossaryTermRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
+ if parent is not None: + request.parent = parent + if term is not None: + request.term = term + if term_id is not None: + request.term_id = term_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.create_glossary_term] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def update_glossary_term( + self, + request: Optional[ + Union[business_glossary.UpdateGlossaryTermRequest, dict] + ] = None, + *, + term: Optional[business_glossary.GlossaryTerm] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> business_glossary.GlossaryTerm: + r"""Updates a GlossaryTerm resource. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + + def sample_update_glossary_term(): + # Create a client + client = dataplex_v1.BusinessGlossaryServiceClient() + + # Initialize request argument(s) + term = dataplex_v1.GlossaryTerm() + term.parent = "parent_value" + + request = dataplex_v1.UpdateGlossaryTermRequest( + term=term, + ) + + # Make the request + response = client.update_glossary_term(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.dataplex_v1.types.UpdateGlossaryTermRequest, dict]): + The request object. Update GlossaryTerm Request + term (google.cloud.dataplex_v1.types.GlossaryTerm): + Required. The GlossaryTerm to update. The GlossaryTerm's + ``name`` field is used to identify the GlossaryTerm to + update. Format: + projects/{project_id_or_number}/locations/{location_id}/glossaries/{glossary_id}/terms/{term_id} + + This corresponds to the ``term`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Required. The list of fields to + update. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.dataplex_v1.types.GlossaryTerm: + GlossaryTerms are the core of + Glossary. 
A GlossaryTerm holds a rich + text description that can be attached to + Entries or specific columns to enrich + them. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [term, update_mask] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, business_glossary.UpdateGlossaryTermRequest): + request = business_glossary.UpdateGlossaryTermRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if term is not None: + request.term = term + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.update_glossary_term] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("term.name", request.term.name),) + ), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def delete_glossary_term( + self, + request: Optional[ + Union[business_glossary.DeleteGlossaryTermRequest, dict] + ] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> None: + r"""Deletes a GlossaryTerm resource. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + + def sample_delete_glossary_term(): + # Create a client + client = dataplex_v1.BusinessGlossaryServiceClient() + + # Initialize request argument(s) + request = dataplex_v1.DeleteGlossaryTermRequest( + name="name_value", + ) + + # Make the request + client.delete_glossary_term(request=request) + + Args: + request (Union[google.cloud.dataplex_v1.types.DeleteGlossaryTermRequest, dict]): + The request object. Delete GlossaryTerm Request + name (str): + Required. The name of the GlossaryTerm to delete. + Format: + projects/{project_id_or_number}/locations/{location_id}/glossaries/{glossary_id}/terms/{term_id} + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. 
Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, business_glossary.DeleteGlossaryTermRequest): + request = business_glossary.DeleteGlossaryTermRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete_glossary_term] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + def get_glossary_term( + self, + request: Optional[Union[business_glossary.GetGlossaryTermRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> business_glossary.GlossaryTerm: + r"""Gets a GlossaryTerm resource. + + .. 
code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + + def sample_get_glossary_term(): + # Create a client + client = dataplex_v1.BusinessGlossaryServiceClient() + + # Initialize request argument(s) + request = dataplex_v1.GetGlossaryTermRequest( + name="name_value", + ) + + # Make the request + response = client.get_glossary_term(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.dataplex_v1.types.GetGlossaryTermRequest, dict]): + The request object. Get GlossaryTerm Request + name (str): + Required. The name of the GlossaryTerm to retrieve. + Format: + projects/{project_id_or_number}/locations/{location_id}/glossaries/{glossary_id}/terms/{term_id} + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.dataplex_v1.types.GlossaryTerm: + GlossaryTerms are the core of + Glossary. A GlossaryTerm holds a rich + text description that can be attached to + Entries or specific columns to enrich + them. + + """ + # Create or coerce a protobuf request object. 
+ # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, business_glossary.GetGlossaryTermRequest): + request = business_glossary.GetGlossaryTermRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_glossary_term] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def list_glossary_terms( + self, + request: Optional[ + Union[business_glossary.ListGlossaryTermsRequest, dict] + ] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pagers.ListGlossaryTermsPager: + r"""Lists GlossaryTerm resources in a Glossary. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. 
+ # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + + def sample_list_glossary_terms(): + # Create a client + client = dataplex_v1.BusinessGlossaryServiceClient() + + # Initialize request argument(s) + request = dataplex_v1.ListGlossaryTermsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_glossary_terms(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.dataplex_v1.types.ListGlossaryTermsRequest, dict]): + The request object. List GlossaryTerms Request + parent (str): + Required. The parent, which has this collection of + GlossaryTerms. Format: + projects/{project_id_or_number}/locations/{location_id}/glossaries/{glossary_id} + where ``location_id`` refers to a Google Cloud region. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.dataplex_v1.services.business_glossary_service.pagers.ListGlossaryTermsPager: + List GlossaryTerms Response + + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. 
+ # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [parent] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, business_glossary.ListGlossaryTermsRequest): + request = business_glossary.ListGlossaryTermsRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_glossary_terms] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListGlossaryTermsPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def __enter__(self) -> "BusinessGlossaryServiceClient": + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. 
warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! Exiting the with block will CLOSE the transport + and may cause errors in other clients! + """ + self.transport.close() + + def list_operations( + self, + request: Optional[operations_pb2.ListOperationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operations_pb2.ListOperationsResponse: + r"""Lists operations that match the specified filter in the request. + + Args: + request (:class:`~.operations_pb2.ListOperationsRequest`): + The request object. Request message for + `ListOperations` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + Returns: + ~.operations_pb2.ListOperationsResponse: + Response message for ``ListOperations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.ListOperationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_operations] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + try: + # Send the request. 
+ response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e + + def get_operation( + self, + request: Optional[operations_pb2.GetOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operations_pb2.Operation: + r"""Gets the latest state of a long-running operation. + + Args: + request (:class:`~.operations_pb2.GetOperationRequest`): + The request object. Request message for + `GetOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + Returns: + ~.operations_pb2.Operation: + An ``Operation`` object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.GetOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_operation] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + try: + # Send the request. 
+ response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e + + def delete_operation( + self, + request: Optional[operations_pb2.DeleteOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> None: + r"""Deletes a long-running operation. + + This method indicates that the client is no longer interested + in the operation result. It does not cancel the operation. + If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.DeleteOperationRequest`): + The request object. Request message for + `DeleteOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.DeleteOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete_operation] + + # Certain fields should be provided within the metadata header; + # add these here. 
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + def cancel_operation( + self, + request: Optional[operations_pb2.CancelOperationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> None: + r"""Starts asynchronous cancellation on a long-running operation. + + The server makes a best effort to cancel the operation, but success + is not guaranteed. If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.CancelOperationRequest`): + The request object. Request message for + `CancelOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = operations_pb2.CancelOperationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.cancel_operation] + + # Certain fields should be provided within the metadata header; + # add these here. 
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + def get_location( + self, + request: Optional[locations_pb2.GetLocationRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> locations_pb2.Location: + r"""Gets information about a location. + + Args: + request (:class:`~.location_pb2.GetLocationRequest`): + The request object. Request message for + `GetLocation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + Returns: + ~.location_pb2.Location: + Location object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = locations_pb2.GetLocationRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_location] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + try: + # Send the request. 
+ response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e + + def list_locations( + self, + request: Optional[locations_pb2.ListLocationsRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> locations_pb2.ListLocationsResponse: + r"""Lists information about the supported locations for this service. + + Args: + request (:class:`~.location_pb2.ListLocationsRequest`): + The request object. Request message for + `ListLocations` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + Returns: + ~.location_pb2.ListLocationsResponse: + Response message for ``ListLocations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = locations_pb2.ListLocationsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_locations] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. 
+ self._validate_universe_domain() + + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + +if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER + DEFAULT_CLIENT_INFO.protobuf_runtime_version = google.protobuf.__version__ + +__all__ = ("BusinessGlossaryServiceClient",) diff --git a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/business_glossary_service/pagers.py b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/business_glossary_service/pagers.py new file mode 100644 index 000000000000..96480e5393cd --- /dev/null +++ b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/business_glossary_service/pagers.py @@ -0,0 +1,513 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from typing import ( + Any, + AsyncIterator, + Awaitable, + Callable, + Iterator, + Optional, + Sequence, + Tuple, + Union, +) + +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.api_core import retry_async as retries_async + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] + OptionalAsyncRetry = Union[ + retries_async.AsyncRetry, gapic_v1.method._MethodDefault, None + ] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object, None] # type: ignore + OptionalAsyncRetry = Union[retries_async.AsyncRetry, object, None] # type: ignore + +from google.cloud.dataplex_v1.types import business_glossary + + +class ListGlossariesPager: + """A pager for iterating through ``list_glossaries`` requests. + + This class thinly wraps an initial + :class:`google.cloud.dataplex_v1.types.ListGlossariesResponse` object, and + provides an ``__iter__`` method to iterate through its + ``glossaries`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListGlossaries`` requests and continue to iterate + through the ``glossaries`` field on the + corresponding responses. + + All the usual :class:`google.cloud.dataplex_v1.types.ListGlossariesResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., business_glossary.ListGlossariesResponse], + request: business_glossary.ListGlossariesRequest, + response: business_glossary.ListGlossariesResponse, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. 
+ request (google.cloud.dataplex_v1.types.ListGlossariesRequest): + The initial request object. + response (google.cloud.dataplex_v1.types.ListGlossariesResponse): + The initial response object. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + """ + self._method = method + self._request = business_glossary.ListGlossariesRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[business_glossary.ListGlossariesResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) + yield self._response + + def __iter__(self) -> Iterator[business_glossary.Glossary]: + for page in self.pages: + yield from page.glossaries + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListGlossariesAsyncPager: + """A pager for iterating through ``list_glossaries`` requests. + + This class thinly wraps an initial + :class:`google.cloud.dataplex_v1.types.ListGlossariesResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``glossaries`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListGlossaries`` requests and continue to iterate + through the ``glossaries`` field on the + corresponding responses. 
+ + All the usual :class:`google.cloud.dataplex_v1.types.ListGlossariesResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., Awaitable[business_glossary.ListGlossariesResponse]], + request: business_glossary.ListGlossariesRequest, + response: business_glossary.ListGlossariesResponse, + *, + retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = () + ): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.dataplex_v1.types.ListGlossariesRequest): + The initial request object. + response (google.cloud.dataplex_v1.types.ListGlossariesResponse): + The initial response object. + retry (google.api_core.retry.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
+ """ + self._method = method + self._request = business_glossary.ListGlossariesRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterator[business_glossary.ListGlossariesResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) + yield self._response + + def __aiter__(self) -> AsyncIterator[business_glossary.Glossary]: + async def async_generator(): + async for page in self.pages: + for response in page.glossaries: + yield response + + return async_generator() + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListGlossaryCategoriesPager: + """A pager for iterating through ``list_glossary_categories`` requests. + + This class thinly wraps an initial + :class:`google.cloud.dataplex_v1.types.ListGlossaryCategoriesResponse` object, and + provides an ``__iter__`` method to iterate through its + ``categories`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListGlossaryCategories`` requests and continue to iterate + through the ``categories`` field on the + corresponding responses. + + All the usual :class:`google.cloud.dataplex_v1.types.ListGlossaryCategoriesResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. 
+ """ + + def __init__( + self, + method: Callable[..., business_glossary.ListGlossaryCategoriesResponse], + request: business_glossary.ListGlossaryCategoriesRequest, + response: business_glossary.ListGlossaryCategoriesResponse, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.dataplex_v1.types.ListGlossaryCategoriesRequest): + The initial request object. + response (google.cloud.dataplex_v1.types.ListGlossaryCategoriesResponse): + The initial response object. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
+ """ + self._method = method + self._request = business_glossary.ListGlossaryCategoriesRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[business_glossary.ListGlossaryCategoriesResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) + yield self._response + + def __iter__(self) -> Iterator[business_glossary.GlossaryCategory]: + for page in self.pages: + yield from page.categories + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListGlossaryCategoriesAsyncPager: + """A pager for iterating through ``list_glossary_categories`` requests. + + This class thinly wraps an initial + :class:`google.cloud.dataplex_v1.types.ListGlossaryCategoriesResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``categories`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListGlossaryCategories`` requests and continue to iterate + through the ``categories`` field on the + corresponding responses. + + All the usual :class:`google.cloud.dataplex_v1.types.ListGlossaryCategoriesResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. 
+ """ + + def __init__( + self, + method: Callable[ + ..., Awaitable[business_glossary.ListGlossaryCategoriesResponse] + ], + request: business_glossary.ListGlossaryCategoriesRequest, + response: business_glossary.ListGlossaryCategoriesResponse, + *, + retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = () + ): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.dataplex_v1.types.ListGlossaryCategoriesRequest): + The initial request object. + response (google.cloud.dataplex_v1.types.ListGlossaryCategoriesResponse): + The initial response object. + retry (google.api_core.retry.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
+ """ + self._method = method + self._request = business_glossary.ListGlossaryCategoriesRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages( + self, + ) -> AsyncIterator[business_glossary.ListGlossaryCategoriesResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) + yield self._response + + def __aiter__(self) -> AsyncIterator[business_glossary.GlossaryCategory]: + async def async_generator(): + async for page in self.pages: + for response in page.categories: + yield response + + return async_generator() + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListGlossaryTermsPager: + """A pager for iterating through ``list_glossary_terms`` requests. + + This class thinly wraps an initial + :class:`google.cloud.dataplex_v1.types.ListGlossaryTermsResponse` object, and + provides an ``__iter__`` method to iterate through its + ``terms`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListGlossaryTerms`` requests and continue to iterate + through the ``terms`` field on the + corresponding responses. + + All the usual :class:`google.cloud.dataplex_v1.types.ListGlossaryTermsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. 
+ """ + + def __init__( + self, + method: Callable[..., business_glossary.ListGlossaryTermsResponse], + request: business_glossary.ListGlossaryTermsRequest, + response: business_glossary.ListGlossaryTermsResponse, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.dataplex_v1.types.ListGlossaryTermsRequest): + The initial request object. + response (google.cloud.dataplex_v1.types.ListGlossaryTermsResponse): + The initial response object. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
+ """ + self._method = method + self._request = business_glossary.ListGlossaryTermsRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[business_glossary.ListGlossaryTermsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) + yield self._response + + def __iter__(self) -> Iterator[business_glossary.GlossaryTerm]: + for page in self.pages: + yield from page.terms + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListGlossaryTermsAsyncPager: + """A pager for iterating through ``list_glossary_terms`` requests. + + This class thinly wraps an initial + :class:`google.cloud.dataplex_v1.types.ListGlossaryTermsResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``terms`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListGlossaryTerms`` requests and continue to iterate + through the ``terms`` field on the + corresponding responses. + + All the usual :class:`google.cloud.dataplex_v1.types.ListGlossaryTermsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. 
+ """ + + def __init__( + self, + method: Callable[..., Awaitable[business_glossary.ListGlossaryTermsResponse]], + request: business_glossary.ListGlossaryTermsRequest, + response: business_glossary.ListGlossaryTermsResponse, + *, + retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = () + ): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.dataplex_v1.types.ListGlossaryTermsRequest): + The initial request object. + response (google.cloud.dataplex_v1.types.ListGlossaryTermsResponse): + The initial response object. + retry (google.api_core.retry.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
+ """ + self._method = method + self._request = business_glossary.ListGlossaryTermsRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterator[business_glossary.ListGlossaryTermsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) + yield self._response + + def __aiter__(self) -> AsyncIterator[business_glossary.GlossaryTerm]: + async def async_generator(): + async for page in self.pages: + for response in page.terms: + yield response + + return async_generator() + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) diff --git a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/business_glossary_service/transports/README.rst b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/business_glossary_service/transports/README.rst new file mode 100644 index 000000000000..2b7007a38bee --- /dev/null +++ b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/business_glossary_service/transports/README.rst @@ -0,0 +1,9 @@ + +transport inheritance structure +_______________________________ + +`BusinessGlossaryServiceTransport` is the ABC for all transports. +- public child `BusinessGlossaryServiceGrpcTransport` for sync gRPC transport (defined in `grpc.py`). +- public child `BusinessGlossaryServiceGrpcAsyncIOTransport` for async gRPC transport (defined in `grpc_asyncio.py`). +- private child `_BaseBusinessGlossaryServiceRestTransport` for base REST transport with inner classes `_BaseMETHOD` (defined in `rest_base.py`). 
+- public child `BusinessGlossaryServiceRestTransport` for sync REST transport with inner classes `METHOD` derived from the parent's corresponding `_BaseMETHOD` classes (defined in `rest.py`). diff --git a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/business_glossary_service/transports/__init__.py b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/business_glossary_service/transports/__init__.py new file mode 100644 index 000000000000..62b48ea0365b --- /dev/null +++ b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/business_glossary_service/transports/__init__.py @@ -0,0 +1,41 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +from typing import Dict, Type + +from .base import BusinessGlossaryServiceTransport +from .grpc import BusinessGlossaryServiceGrpcTransport +from .grpc_asyncio import BusinessGlossaryServiceGrpcAsyncIOTransport +from .rest import ( + BusinessGlossaryServiceRestInterceptor, + BusinessGlossaryServiceRestTransport, +) + +# Compile a registry of transports. 
+_transport_registry = ( + OrderedDict() +) # type: Dict[str, Type[BusinessGlossaryServiceTransport]] +_transport_registry["grpc"] = BusinessGlossaryServiceGrpcTransport +_transport_registry["grpc_asyncio"] = BusinessGlossaryServiceGrpcAsyncIOTransport +_transport_registry["rest"] = BusinessGlossaryServiceRestTransport + +__all__ = ( + "BusinessGlossaryServiceTransport", + "BusinessGlossaryServiceGrpcTransport", + "BusinessGlossaryServiceGrpcAsyncIOTransport", + "BusinessGlossaryServiceRestTransport", + "BusinessGlossaryServiceRestInterceptor", +) diff --git a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/business_glossary_service/transports/base.py b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/business_glossary_service/transports/base.py new file mode 100644 index 000000000000..a3b303b5d5be --- /dev/null +++ b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/business_glossary_service/transports/base.py @@ -0,0 +1,479 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import abc +from typing import Awaitable, Callable, Dict, Optional, Sequence, Union + +import google.api_core +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1, operations_v1 +from google.api_core import retry as retries +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.oauth2 import service_account # type: ignore +import google.protobuf +from google.protobuf import empty_pb2 # type: ignore + +from google.cloud.dataplex_v1 import gapic_version as package_version +from google.cloud.dataplex_v1.types import business_glossary + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + +if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER + DEFAULT_CLIENT_INFO.protobuf_runtime_version = google.protobuf.__version__ + + +class BusinessGlossaryServiceTransport(abc.ABC): + """Abstract transport class for BusinessGlossaryService.""" + + AUTH_SCOPES = ("https://www.googleapis.com/auth/cloud-platform",) + + DEFAULT_HOST: str = "dataplex.googleapis.com" + + def __init__( + self, + *, + host: str = DEFAULT_HOST, + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + **kwargs, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'dataplex.googleapis.com'). 
+ credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A list of scopes. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + """ + + scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} + + # Save the scopes. + self._scopes = scopes + if not hasattr(self, "_ignore_credentials"): + self._ignore_credentials: bool = False + + # If no credentials are provided, then determine the appropriate + # defaults. + if credentials and credentials_file: + raise core_exceptions.DuplicateCredentialArgs( + "'credentials_file' and 'credentials' are mutually exclusive" + ) + + if credentials_file is not None: + credentials, _ = google.auth.load_credentials_from_file( + credentials_file, **scopes_kwargs, quota_project_id=quota_project_id + ) + elif credentials is None and not self._ignore_credentials: + credentials, _ = google.auth.default( + **scopes_kwargs, quota_project_id=quota_project_id + ) + # Don't apply audience if the credentials file passed from user. 
+ if hasattr(credentials, "with_gdch_audience"): + credentials = credentials.with_gdch_audience( + api_audience if api_audience else host + ) + + # If the credentials are service account credentials, then always try to use self signed JWT. + if ( + always_use_jwt_access + and isinstance(credentials, service_account.Credentials) + and hasattr(service_account.Credentials, "with_always_use_jwt_access") + ): + credentials = credentials.with_always_use_jwt_access(True) + + # Save the credentials. + self._credentials = credentials + + # Save the hostname. Default to port 443 (HTTPS) if none is specified. + if ":" not in host: + host += ":443" + self._host = host + + @property + def host(self): + return self._host + + def _prep_wrapped_messages(self, client_info): + # Precompute the wrapped methods. + self._wrapped_methods = { + self.create_glossary: gapic_v1.method.wrap_method( + self.create_glossary, + default_timeout=None, + client_info=client_info, + ), + self.update_glossary: gapic_v1.method.wrap_method( + self.update_glossary, + default_timeout=None, + client_info=client_info, + ), + self.delete_glossary: gapic_v1.method.wrap_method( + self.delete_glossary, + default_timeout=None, + client_info=client_info, + ), + self.get_glossary: gapic_v1.method.wrap_method( + self.get_glossary, + default_timeout=None, + client_info=client_info, + ), + self.list_glossaries: gapic_v1.method.wrap_method( + self.list_glossaries, + default_timeout=None, + client_info=client_info, + ), + self.create_glossary_category: gapic_v1.method.wrap_method( + self.create_glossary_category, + default_timeout=None, + client_info=client_info, + ), + self.update_glossary_category: gapic_v1.method.wrap_method( + self.update_glossary_category, + default_timeout=None, + client_info=client_info, + ), + self.delete_glossary_category: gapic_v1.method.wrap_method( + self.delete_glossary_category, + default_timeout=None, + client_info=client_info, + ), + self.get_glossary_category: 
gapic_v1.method.wrap_method( + self.get_glossary_category, + default_timeout=None, + client_info=client_info, + ), + self.list_glossary_categories: gapic_v1.method.wrap_method( + self.list_glossary_categories, + default_timeout=None, + client_info=client_info, + ), + self.create_glossary_term: gapic_v1.method.wrap_method( + self.create_glossary_term, + default_timeout=None, + client_info=client_info, + ), + self.update_glossary_term: gapic_v1.method.wrap_method( + self.update_glossary_term, + default_timeout=None, + client_info=client_info, + ), + self.delete_glossary_term: gapic_v1.method.wrap_method( + self.delete_glossary_term, + default_timeout=None, + client_info=client_info, + ), + self.get_glossary_term: gapic_v1.method.wrap_method( + self.get_glossary_term, + default_timeout=None, + client_info=client_info, + ), + self.list_glossary_terms: gapic_v1.method.wrap_method( + self.list_glossary_terms, + default_timeout=None, + client_info=client_info, + ), + self.get_location: gapic_v1.method.wrap_method( + self.get_location, + default_timeout=None, + client_info=client_info, + ), + self.list_locations: gapic_v1.method.wrap_method( + self.list_locations, + default_timeout=None, + client_info=client_info, + ), + self.cancel_operation: gapic_v1.method.wrap_method( + self.cancel_operation, + default_timeout=None, + client_info=client_info, + ), + self.delete_operation: gapic_v1.method.wrap_method( + self.delete_operation, + default_timeout=None, + client_info=client_info, + ), + self.get_operation: gapic_v1.method.wrap_method( + self.get_operation, + default_timeout=None, + client_info=client_info, + ), + self.list_operations: gapic_v1.method.wrap_method( + self.list_operations, + default_timeout=None, + client_info=client_info, + ), + } + + def close(self): + """Closes resources associated with the transport. + + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! 
+ """ + raise NotImplementedError() + + @property + def operations_client(self): + """Return the client designed to process long-running operations.""" + raise NotImplementedError() + + @property + def create_glossary( + self, + ) -> Callable[ + [business_glossary.CreateGlossaryRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def update_glossary( + self, + ) -> Callable[ + [business_glossary.UpdateGlossaryRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def delete_glossary( + self, + ) -> Callable[ + [business_glossary.DeleteGlossaryRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def get_glossary( + self, + ) -> Callable[ + [business_glossary.GetGlossaryRequest], + Union[business_glossary.Glossary, Awaitable[business_glossary.Glossary]], + ]: + raise NotImplementedError() + + @property + def list_glossaries( + self, + ) -> Callable[ + [business_glossary.ListGlossariesRequest], + Union[ + business_glossary.ListGlossariesResponse, + Awaitable[business_glossary.ListGlossariesResponse], + ], + ]: + raise NotImplementedError() + + @property + def create_glossary_category( + self, + ) -> Callable[ + [business_glossary.CreateGlossaryCategoryRequest], + Union[ + business_glossary.GlossaryCategory, + Awaitable[business_glossary.GlossaryCategory], + ], + ]: + raise NotImplementedError() + + @property + def update_glossary_category( + self, + ) -> Callable[ + [business_glossary.UpdateGlossaryCategoryRequest], + Union[ + business_glossary.GlossaryCategory, + Awaitable[business_glossary.GlossaryCategory], + ], + ]: + raise NotImplementedError() + + @property + def delete_glossary_category( + self, + ) -> Callable[ + [business_glossary.DeleteGlossaryCategoryRequest], + Union[empty_pb2.Empty, Awaitable[empty_pb2.Empty]], + ]: + 
raise NotImplementedError() + + @property + def get_glossary_category( + self, + ) -> Callable[ + [business_glossary.GetGlossaryCategoryRequest], + Union[ + business_glossary.GlossaryCategory, + Awaitable[business_glossary.GlossaryCategory], + ], + ]: + raise NotImplementedError() + + @property + def list_glossary_categories( + self, + ) -> Callable[ + [business_glossary.ListGlossaryCategoriesRequest], + Union[ + business_glossary.ListGlossaryCategoriesResponse, + Awaitable[business_glossary.ListGlossaryCategoriesResponse], + ], + ]: + raise NotImplementedError() + + @property + def create_glossary_term( + self, + ) -> Callable[ + [business_glossary.CreateGlossaryTermRequest], + Union[ + business_glossary.GlossaryTerm, Awaitable[business_glossary.GlossaryTerm] + ], + ]: + raise NotImplementedError() + + @property + def update_glossary_term( + self, + ) -> Callable[ + [business_glossary.UpdateGlossaryTermRequest], + Union[ + business_glossary.GlossaryTerm, Awaitable[business_glossary.GlossaryTerm] + ], + ]: + raise NotImplementedError() + + @property + def delete_glossary_term( + self, + ) -> Callable[ + [business_glossary.DeleteGlossaryTermRequest], + Union[empty_pb2.Empty, Awaitable[empty_pb2.Empty]], + ]: + raise NotImplementedError() + + @property + def get_glossary_term( + self, + ) -> Callable[ + [business_glossary.GetGlossaryTermRequest], + Union[ + business_glossary.GlossaryTerm, Awaitable[business_glossary.GlossaryTerm] + ], + ]: + raise NotImplementedError() + + @property + def list_glossary_terms( + self, + ) -> Callable[ + [business_glossary.ListGlossaryTermsRequest], + Union[ + business_glossary.ListGlossaryTermsResponse, + Awaitable[business_glossary.ListGlossaryTermsResponse], + ], + ]: + raise NotImplementedError() + + @property + def list_operations( + self, + ) -> Callable[ + [operations_pb2.ListOperationsRequest], + Union[ + operations_pb2.ListOperationsResponse, + Awaitable[operations_pb2.ListOperationsResponse], + ], + ]: + raise 
NotImplementedError() + + @property + def get_operation( + self, + ) -> Callable[ + [operations_pb2.GetOperationRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def cancel_operation( + self, + ) -> Callable[[operations_pb2.CancelOperationRequest], None,]: + raise NotImplementedError() + + @property + def delete_operation( + self, + ) -> Callable[[operations_pb2.DeleteOperationRequest], None,]: + raise NotImplementedError() + + @property + def get_location( + self, + ) -> Callable[ + [locations_pb2.GetLocationRequest], + Union[locations_pb2.Location, Awaitable[locations_pb2.Location]], + ]: + raise NotImplementedError() + + @property + def list_locations( + self, + ) -> Callable[ + [locations_pb2.ListLocationsRequest], + Union[ + locations_pb2.ListLocationsResponse, + Awaitable[locations_pb2.ListLocationsResponse], + ], + ]: + raise NotImplementedError() + + @property + def kind(self) -> str: + raise NotImplementedError() + + +__all__ = ("BusinessGlossaryServiceTransport",) diff --git a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/business_glossary_service/transports/grpc.py b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/business_glossary_service/transports/grpc.py new file mode 100644 index 000000000000..0acb54a4bd42 --- /dev/null +++ b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/business_glossary_service/transports/grpc.py @@ -0,0 +1,883 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import json +import logging as std_logging +import pickle +from typing import Callable, Dict, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import gapic_v1, grpc_helpers, operations_v1 +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore +from google.protobuf.json_format import MessageToJson +import google.protobuf.message +import grpc # type: ignore +import proto # type: ignore + +from google.cloud.dataplex_v1.types import business_glossary + +from .base import DEFAULT_CLIENT_INFO, BusinessGlossaryServiceTransport + +try: + from google.api_core import client_logging # type: ignore + + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER +except ImportError: # pragma: NO COVER + CLIENT_LOGGING_SUPPORTED = False + +_LOGGER = std_logging.getLogger(__name__) + + +class _LoggingClientInterceptor(grpc.UnaryUnaryClientInterceptor): # pragma: NO COVER + def intercept_unary_unary(self, continuation, client_call_details, request): + logging_enabled = CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + std_logging.DEBUG + ) + if logging_enabled: # pragma: NO COVER + request_metadata = client_call_details.metadata + if 
isinstance(request, proto.Message): + request_payload = type(request).to_json(request) + elif isinstance(request, google.protobuf.message.Message): + request_payload = MessageToJson(request) + else: + request_payload = f"{type(request).__name__}: {pickle.dumps(request)}" + + request_metadata = { + key: value.decode("utf-8") if isinstance(value, bytes) else value + for key, value in request_metadata + } + grpc_request = { + "payload": request_payload, + "requestMethod": "grpc", + "metadata": dict(request_metadata), + } + _LOGGER.debug( + f"Sending request for {client_call_details.method}", + extra={ + "serviceName": "google.cloud.dataplex.v1.BusinessGlossaryService", + "rpcName": str(client_call_details.method), + "request": grpc_request, + "metadata": grpc_request["metadata"], + }, + ) + response = continuation(client_call_details, request) + if logging_enabled: # pragma: NO COVER + response_metadata = response.trailing_metadata() + # Convert gRPC metadata `` to list of tuples + metadata = ( + dict([(k, str(v)) for k, v in response_metadata]) + if response_metadata + else None + ) + result = response.result() + if isinstance(result, proto.Message): + response_payload = type(result).to_json(result) + elif isinstance(result, google.protobuf.message.Message): + response_payload = MessageToJson(result) + else: + response_payload = f"{type(result).__name__}: {pickle.dumps(result)}" + grpc_response = { + "payload": response_payload, + "metadata": metadata, + "status": "OK", + } + _LOGGER.debug( + f"Received response for {client_call_details.method}.", + extra={ + "serviceName": "google.cloud.dataplex.v1.BusinessGlossaryService", + "rpcName": client_call_details.method, + "response": grpc_response, + "metadata": grpc_response["metadata"], + }, + ) + return response + + +class BusinessGlossaryServiceGrpcTransport(BusinessGlossaryServiceTransport): + """gRPC backend transport for BusinessGlossaryService. 
+ + BusinessGlossaryService provides APIs for managing business + glossary resources for enterprise customers. + The resources currently supported in Business Glossary are: + + 1. Glossary + 2. GlossaryCategory + 3. GlossaryTerm + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. + """ + + _stubs: Dict[str, Callable] + + def __init__( + self, + *, + host: str = "dataplex.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'dataplex.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if a ``channel`` instance is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. 
+ This argument is ignored if a ``channel`` instance is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if a ``channel`` instance is provided. + channel (Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]]): + A ``Channel`` instance through which to make calls, or a Callable + that constructs and returns one. If set to None, ``self.create_channel`` + is used to create the channel. If a Callable is given, it will be called + with the same arguments as used in ``self.create_channel``. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for the grpc channel. It is ignored if a ``channel`` instance is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. It is + ignored if a ``channel`` instance or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. 
+ + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + self._operations_client: Optional[operations_v1.OperationsClient] = None + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if isinstance(channel, grpc.Channel): + # Ignore credentials if a channel was passed. + credentials = None + self._ignore_credentials = True + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. 
+ if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + # initialize with the provided callable or the default channel + channel_init = channel or type(self).create_channel + self._grpc_channel = channel_init( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. + credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + self._interceptor = _LoggingClientInterceptor() + self._logged_channel = grpc.intercept_channel( + self._grpc_channel, self._interceptor + ) + + # Wrap messages. 
This must be done after self._logged_channel exists + self._prep_wrapped_messages(client_info) + + @classmethod + def create_channel( + cls, + host: str = "dataplex.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs, + ) -> grpc.Channel: + """Create and return a gRPC channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + grpc.Channel: A gRPC channel object. + + Raises: + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. 
+ """ + + return grpc_helpers.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs, + ) + + @property + def grpc_channel(self) -> grpc.Channel: + """Return the channel designed to connect to this service.""" + return self._grpc_channel + + @property + def operations_client(self) -> operations_v1.OperationsClient: + """Create the client designed to process long-running operations. + + This property caches on the instance; repeated calls return the same + client. + """ + # Quick check: Only create a new client if we do not already have one. + if self._operations_client is None: + self._operations_client = operations_v1.OperationsClient( + self._logged_channel + ) + + # Return the client from cache. + return self._operations_client + + @property + def create_glossary( + self, + ) -> Callable[[business_glossary.CreateGlossaryRequest], operations_pb2.Operation]: + r"""Return a callable for the create glossary method over gRPC. + + Creates a new Glossary resource. + + Returns: + Callable[[~.CreateGlossaryRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "create_glossary" not in self._stubs: + self._stubs["create_glossary"] = self._logged_channel.unary_unary( + "/google.cloud.dataplex.v1.BusinessGlossaryService/CreateGlossary", + request_serializer=business_glossary.CreateGlossaryRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["create_glossary"] + + @property + def update_glossary( + self, + ) -> Callable[[business_glossary.UpdateGlossaryRequest], operations_pb2.Operation]: + r"""Return a callable for the update glossary method over gRPC. + + Updates a Glossary resource. + + Returns: + Callable[[~.UpdateGlossaryRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "update_glossary" not in self._stubs: + self._stubs["update_glossary"] = self._logged_channel.unary_unary( + "/google.cloud.dataplex.v1.BusinessGlossaryService/UpdateGlossary", + request_serializer=business_glossary.UpdateGlossaryRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["update_glossary"] + + @property + def delete_glossary( + self, + ) -> Callable[[business_glossary.DeleteGlossaryRequest], operations_pb2.Operation]: + r"""Return a callable for the delete glossary method over gRPC. + + Deletes a Glossary resource. All the categories and + terms within the Glossary must be deleted before the + Glossary can be deleted. + + Returns: + Callable[[~.DeleteGlossaryRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "delete_glossary" not in self._stubs: + self._stubs["delete_glossary"] = self._logged_channel.unary_unary( + "/google.cloud.dataplex.v1.BusinessGlossaryService/DeleteGlossary", + request_serializer=business_glossary.DeleteGlossaryRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["delete_glossary"] + + @property + def get_glossary( + self, + ) -> Callable[[business_glossary.GetGlossaryRequest], business_glossary.Glossary]: + r"""Return a callable for the get glossary method over gRPC. + + Gets a Glossary resource. + + Returns: + Callable[[~.GetGlossaryRequest], + ~.Glossary]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_glossary" not in self._stubs: + self._stubs["get_glossary"] = self._logged_channel.unary_unary( + "/google.cloud.dataplex.v1.BusinessGlossaryService/GetGlossary", + request_serializer=business_glossary.GetGlossaryRequest.serialize, + response_deserializer=business_glossary.Glossary.deserialize, + ) + return self._stubs["get_glossary"] + + @property + def list_glossaries( + self, + ) -> Callable[ + [business_glossary.ListGlossariesRequest], + business_glossary.ListGlossariesResponse, + ]: + r"""Return a callable for the list glossaries method over gRPC. + + Lists Glossary resources in a project and location. + + Returns: + Callable[[~.ListGlossariesRequest], + ~.ListGlossariesResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "list_glossaries" not in self._stubs: + self._stubs["list_glossaries"] = self._logged_channel.unary_unary( + "/google.cloud.dataplex.v1.BusinessGlossaryService/ListGlossaries", + request_serializer=business_glossary.ListGlossariesRequest.serialize, + response_deserializer=business_glossary.ListGlossariesResponse.deserialize, + ) + return self._stubs["list_glossaries"] + + @property + def create_glossary_category( + self, + ) -> Callable[ + [business_glossary.CreateGlossaryCategoryRequest], + business_glossary.GlossaryCategory, + ]: + r"""Return a callable for the create glossary category method over gRPC. + + Creates a new GlossaryCategory resource. + + Returns: + Callable[[~.CreateGlossaryCategoryRequest], + ~.GlossaryCategory]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "create_glossary_category" not in self._stubs: + self._stubs["create_glossary_category"] = self._logged_channel.unary_unary( + "/google.cloud.dataplex.v1.BusinessGlossaryService/CreateGlossaryCategory", + request_serializer=business_glossary.CreateGlossaryCategoryRequest.serialize, + response_deserializer=business_glossary.GlossaryCategory.deserialize, + ) + return self._stubs["create_glossary_category"] + + @property + def update_glossary_category( + self, + ) -> Callable[ + [business_glossary.UpdateGlossaryCategoryRequest], + business_glossary.GlossaryCategory, + ]: + r"""Return a callable for the update glossary category method over gRPC. + + Updates a GlossaryCategory resource. + + Returns: + Callable[[~.UpdateGlossaryCategoryRequest], + ~.GlossaryCategory]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. 
+ # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "update_glossary_category" not in self._stubs: + self._stubs["update_glossary_category"] = self._logged_channel.unary_unary( + "/google.cloud.dataplex.v1.BusinessGlossaryService/UpdateGlossaryCategory", + request_serializer=business_glossary.UpdateGlossaryCategoryRequest.serialize, + response_deserializer=business_glossary.GlossaryCategory.deserialize, + ) + return self._stubs["update_glossary_category"] + + @property + def delete_glossary_category( + self, + ) -> Callable[[business_glossary.DeleteGlossaryCategoryRequest], empty_pb2.Empty]: + r"""Return a callable for the delete glossary category method over gRPC. + + Deletes a GlossaryCategory resource. All the + GlossaryCategories and GlossaryTerms nested directly + under the specified GlossaryCategory will be moved one + level up to the parent in the hierarchy. + + Returns: + Callable[[~.DeleteGlossaryCategoryRequest], + ~.Empty]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_glossary_category" not in self._stubs: + self._stubs["delete_glossary_category"] = self._logged_channel.unary_unary( + "/google.cloud.dataplex.v1.BusinessGlossaryService/DeleteGlossaryCategory", + request_serializer=business_glossary.DeleteGlossaryCategoryRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs["delete_glossary_category"] + + @property + def get_glossary_category( + self, + ) -> Callable[ + [business_glossary.GetGlossaryCategoryRequest], + business_glossary.GlossaryCategory, + ]: + r"""Return a callable for the get glossary category method over gRPC. + + Gets a GlossaryCategory resource. 
+ + Returns: + Callable[[~.GetGlossaryCategoryRequest], + ~.GlossaryCategory]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_glossary_category" not in self._stubs: + self._stubs["get_glossary_category"] = self._logged_channel.unary_unary( + "/google.cloud.dataplex.v1.BusinessGlossaryService/GetGlossaryCategory", + request_serializer=business_glossary.GetGlossaryCategoryRequest.serialize, + response_deserializer=business_glossary.GlossaryCategory.deserialize, + ) + return self._stubs["get_glossary_category"] + + @property + def list_glossary_categories( + self, + ) -> Callable[ + [business_glossary.ListGlossaryCategoriesRequest], + business_glossary.ListGlossaryCategoriesResponse, + ]: + r"""Return a callable for the list glossary categories method over gRPC. + + Lists GlossaryCategory resources in a Glossary. + + Returns: + Callable[[~.ListGlossaryCategoriesRequest], + ~.ListGlossaryCategoriesResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "list_glossary_categories" not in self._stubs: + self._stubs["list_glossary_categories"] = self._logged_channel.unary_unary( + "/google.cloud.dataplex.v1.BusinessGlossaryService/ListGlossaryCategories", + request_serializer=business_glossary.ListGlossaryCategoriesRequest.serialize, + response_deserializer=business_glossary.ListGlossaryCategoriesResponse.deserialize, + ) + return self._stubs["list_glossary_categories"] + + @property + def create_glossary_term( + self, + ) -> Callable[ + [business_glossary.CreateGlossaryTermRequest], business_glossary.GlossaryTerm + ]: + r"""Return a callable for the create glossary term method over gRPC. + + Creates a new GlossaryTerm resource. + + Returns: + Callable[[~.CreateGlossaryTermRequest], + ~.GlossaryTerm]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "create_glossary_term" not in self._stubs: + self._stubs["create_glossary_term"] = self._logged_channel.unary_unary( + "/google.cloud.dataplex.v1.BusinessGlossaryService/CreateGlossaryTerm", + request_serializer=business_glossary.CreateGlossaryTermRequest.serialize, + response_deserializer=business_glossary.GlossaryTerm.deserialize, + ) + return self._stubs["create_glossary_term"] + + @property + def update_glossary_term( + self, + ) -> Callable[ + [business_glossary.UpdateGlossaryTermRequest], business_glossary.GlossaryTerm + ]: + r"""Return a callable for the update glossary term method over gRPC. + + Updates a GlossaryTerm resource. + + Returns: + Callable[[~.UpdateGlossaryTermRequest], + ~.GlossaryTerm]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. 
+ # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "update_glossary_term" not in self._stubs: + self._stubs["update_glossary_term"] = self._logged_channel.unary_unary( + "/google.cloud.dataplex.v1.BusinessGlossaryService/UpdateGlossaryTerm", + request_serializer=business_glossary.UpdateGlossaryTermRequest.serialize, + response_deserializer=business_glossary.GlossaryTerm.deserialize, + ) + return self._stubs["update_glossary_term"] + + @property + def delete_glossary_term( + self, + ) -> Callable[[business_glossary.DeleteGlossaryTermRequest], empty_pb2.Empty]: + r"""Return a callable for the delete glossary term method over gRPC. + + Deletes a GlossaryTerm resource. + + Returns: + Callable[[~.DeleteGlossaryTermRequest], + ~.Empty]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_glossary_term" not in self._stubs: + self._stubs["delete_glossary_term"] = self._logged_channel.unary_unary( + "/google.cloud.dataplex.v1.BusinessGlossaryService/DeleteGlossaryTerm", + request_serializer=business_glossary.DeleteGlossaryTermRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs["delete_glossary_term"] + + @property + def get_glossary_term( + self, + ) -> Callable[ + [business_glossary.GetGlossaryTermRequest], business_glossary.GlossaryTerm + ]: + r"""Return a callable for the get glossary term method over gRPC. + + Gets a GlossaryTerm resource. + + Returns: + Callable[[~.GetGlossaryTermRequest], + ~.GlossaryTerm]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. 
+ # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_glossary_term" not in self._stubs: + self._stubs["get_glossary_term"] = self._logged_channel.unary_unary( + "/google.cloud.dataplex.v1.BusinessGlossaryService/GetGlossaryTerm", + request_serializer=business_glossary.GetGlossaryTermRequest.serialize, + response_deserializer=business_glossary.GlossaryTerm.deserialize, + ) + return self._stubs["get_glossary_term"] + + @property + def list_glossary_terms( + self, + ) -> Callable[ + [business_glossary.ListGlossaryTermsRequest], + business_glossary.ListGlossaryTermsResponse, + ]: + r"""Return a callable for the list glossary terms method over gRPC. + + Lists GlossaryTerm resources in a Glossary. + + Returns: + Callable[[~.ListGlossaryTermsRequest], + ~.ListGlossaryTermsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_glossary_terms" not in self._stubs: + self._stubs["list_glossary_terms"] = self._logged_channel.unary_unary( + "/google.cloud.dataplex.v1.BusinessGlossaryService/ListGlossaryTerms", + request_serializer=business_glossary.ListGlossaryTermsRequest.serialize, + response_deserializer=business_glossary.ListGlossaryTermsResponse.deserialize, + ) + return self._stubs["list_glossary_terms"] + + def close(self): + self._logged_channel.close() + + @property + def delete_operation( + self, + ) -> Callable[[operations_pb2.DeleteOperationRequest], None]: + r"""Return a callable for the delete_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "delete_operation" not in self._stubs: + self._stubs["delete_operation"] = self._logged_channel.unary_unary( + "/google.longrunning.Operations/DeleteOperation", + request_serializer=operations_pb2.DeleteOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["delete_operation"] + + @property + def cancel_operation( + self, + ) -> Callable[[operations_pb2.CancelOperationRequest], None]: + r"""Return a callable for the cancel_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "cancel_operation" not in self._stubs: + self._stubs["cancel_operation"] = self._logged_channel.unary_unary( + "/google.longrunning.Operations/CancelOperation", + request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["cancel_operation"] + + @property + def get_operation( + self, + ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: + r"""Return a callable for the get_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "get_operation" not in self._stubs: + self._stubs["get_operation"] = self._logged_channel.unary_unary( + "/google.longrunning.Operations/GetOperation", + request_serializer=operations_pb2.GetOperationRequest.SerializeToString, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["get_operation"] + + @property + def list_operations( + self, + ) -> Callable[ + [operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse + ]: + r"""Return a callable for the list_operations method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_operations" not in self._stubs: + self._stubs["list_operations"] = self._logged_channel.unary_unary( + "/google.longrunning.Operations/ListOperations", + request_serializer=operations_pb2.ListOperationsRequest.SerializeToString, + response_deserializer=operations_pb2.ListOperationsResponse.FromString, + ) + return self._stubs["list_operations"] + + @property + def list_locations( + self, + ) -> Callable[ + [locations_pb2.ListLocationsRequest], locations_pb2.ListLocationsResponse + ]: + r"""Return a callable for the list locations method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "list_locations" not in self._stubs: + self._stubs["list_locations"] = self._logged_channel.unary_unary( + "/google.cloud.location.Locations/ListLocations", + request_serializer=locations_pb2.ListLocationsRequest.SerializeToString, + response_deserializer=locations_pb2.ListLocationsResponse.FromString, + ) + return self._stubs["list_locations"] + + @property + def get_location( + self, + ) -> Callable[[locations_pb2.GetLocationRequest], locations_pb2.Location]: + r"""Return a callable for the get location method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_location" not in self._stubs: + self._stubs["get_location"] = self._logged_channel.unary_unary( + "/google.cloud.location.Locations/GetLocation", + request_serializer=locations_pb2.GetLocationRequest.SerializeToString, + response_deserializer=locations_pb2.Location.FromString, + ) + return self._stubs["get_location"] + + @property + def kind(self) -> str: + return "grpc" + + +__all__ = ("BusinessGlossaryServiceGrpcTransport",) diff --git a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/business_glossary_service/transports/grpc_asyncio.py b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/business_glossary_service/transports/grpc_asyncio.py new file mode 100644 index 000000000000..76eb49de5442 --- /dev/null +++ b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/business_glossary_service/transports/grpc_asyncio.py @@ -0,0 +1,1021 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import inspect +import json +import logging as std_logging +import pickle +from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1, grpc_helpers_async, operations_v1 +from google.api_core import retry_async as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore +from google.protobuf.json_format import MessageToJson +import google.protobuf.message +import grpc # type: ignore +from grpc.experimental import aio # type: ignore +import proto # type: ignore + +from google.cloud.dataplex_v1.types import business_glossary + +from .base import DEFAULT_CLIENT_INFO, BusinessGlossaryServiceTransport +from .grpc import BusinessGlossaryServiceGrpcTransport + +try: + from google.api_core import client_logging # type: ignore + + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER +except ImportError: # pragma: NO COVER + CLIENT_LOGGING_SUPPORTED = False + +_LOGGER = std_logging.getLogger(__name__) + + +class _LoggingClientAIOInterceptor( + grpc.aio.UnaryUnaryClientInterceptor +): # pragma: NO COVER + async def intercept_unary_unary(self, continuation, 
client_call_details, request): + logging_enabled = CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + std_logging.DEBUG + ) + if logging_enabled: # pragma: NO COVER + request_metadata = client_call_details.metadata + if isinstance(request, proto.Message): + request_payload = type(request).to_json(request) + elif isinstance(request, google.protobuf.message.Message): + request_payload = MessageToJson(request) + else: + request_payload = f"{type(request).__name__}: {pickle.dumps(request)}" + + request_metadata = { + key: value.decode("utf-8") if isinstance(value, bytes) else value + for key, value in request_metadata + } + grpc_request = { + "payload": request_payload, + "requestMethod": "grpc", + "metadata": dict(request_metadata), + } + _LOGGER.debug( + f"Sending request for {client_call_details.method}", + extra={ + "serviceName": "google.cloud.dataplex.v1.BusinessGlossaryService", + "rpcName": str(client_call_details.method), + "request": grpc_request, + "metadata": grpc_request["metadata"], + }, + ) + response = await continuation(client_call_details, request) + if logging_enabled: # pragma: NO COVER + response_metadata = await response.trailing_metadata() + # Convert gRPC trailing metadata to a dict of strings, or None if empty + metadata = ( + dict([(k, str(v)) for k, v in response_metadata]) + if response_metadata + else None + ) + result = await response + if isinstance(result, proto.Message): + response_payload = type(result).to_json(result) + elif isinstance(result, google.protobuf.message.Message): + response_payload = MessageToJson(result) + else: + response_payload = f"{type(result).__name__}: {pickle.dumps(result)}" + grpc_response = { + "payload": response_payload, + "metadata": metadata, + "status": "OK", + } + _LOGGER.debug( + f"Received response to rpc {client_call_details.method}.", + extra={ + "serviceName": "google.cloud.dataplex.v1.BusinessGlossaryService", + "rpcName": str(client_call_details.method), + "response": grpc_response, + "metadata":
grpc_response["metadata"], + }, + ) + return response + + +class BusinessGlossaryServiceGrpcAsyncIOTransport(BusinessGlossaryServiceTransport): + """gRPC AsyncIO backend transport for BusinessGlossaryService. + + BusinessGlossaryService provides APIs for managing business + glossary resources for enterprise customers. + The resources currently supported in Business Glossary are: + + 1. Glossary + 2. GlossaryCategory + 3. GlossaryTerm + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. + """ + + _grpc_channel: aio.Channel + _stubs: Dict[str, Callable] = {} + + @classmethod + def create_channel( + cls, + host: str = "dataplex.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs, + ) -> aio.Channel: + """Create and return a gRPC AsyncIO channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. 
+ Returns: + aio.Channel: A gRPC AsyncIO channel object. + """ + + return grpc_helpers_async.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs, + ) + + def __init__( + self, + *, + host: str = "dataplex.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[Union[aio.Channel, Callable[..., aio.Channel]]] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'dataplex.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if a ``channel`` instance is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if a ``channel`` instance is provided. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. 
+ channel (Optional[Union[aio.Channel, Callable[..., aio.Channel]]]): + A ``Channel`` instance through which to make calls, or a Callable + that constructs and returns one. If set to None, ``self.create_channel`` + is used to create the channel. If a Callable is given, it will be called + with the same arguments as used in ``self.create_channel``. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for the grpc channel. It is ignored if a ``channel`` instance is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. It is + ignored if a ``channel`` instance or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. 
+ google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + self._operations_client: Optional[operations_v1.OperationsAsyncClient] = None + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if isinstance(channel, aio.Channel): + # Ignore credentials if a channel was passed. + credentials = None + self._ignore_credentials = True + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. + if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + # initialize with the provided callable or the default channel + channel_init = channel or type(self).create_channel + self._grpc_channel = channel_init( + self._host, + # use the credentials which are saved + 
credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. + credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + self._interceptor = _LoggingClientAIOInterceptor() + self._grpc_channel._unary_unary_interceptors.append(self._interceptor) + self._logged_channel = self._grpc_channel + self._wrap_with_kind = ( + "kind" in inspect.signature(gapic_v1.method_async.wrap_method).parameters + ) + # Wrap messages. This must be done after self._logged_channel exists + self._prep_wrapped_messages(client_info) + + @property + def grpc_channel(self) -> aio.Channel: + """Create the channel designed to connect to this service. + + This property caches on the instance; repeated calls return + the same channel. + """ + # Return the channel from cache. + return self._grpc_channel + + @property + def operations_client(self) -> operations_v1.OperationsAsyncClient: + """Create the client designed to process long-running operations. + + This property caches on the instance; repeated calls return the same + client. + """ + # Quick check: Only create a new client if we do not already have one. + if self._operations_client is None: + self._operations_client = operations_v1.OperationsAsyncClient( + self._logged_channel + ) + + # Return the client from cache. + return self._operations_client + + @property + def create_glossary( + self, + ) -> Callable[ + [business_glossary.CreateGlossaryRequest], Awaitable[operations_pb2.Operation] + ]: + r"""Return a callable for the create glossary method over gRPC. + + Creates a new Glossary resource. + + Returns: + Callable[[~.CreateGlossaryRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. 
+ """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "create_glossary" not in self._stubs: + self._stubs["create_glossary"] = self._logged_channel.unary_unary( + "/google.cloud.dataplex.v1.BusinessGlossaryService/CreateGlossary", + request_serializer=business_glossary.CreateGlossaryRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["create_glossary"] + + @property + def update_glossary( + self, + ) -> Callable[ + [business_glossary.UpdateGlossaryRequest], Awaitable[operations_pb2.Operation] + ]: + r"""Return a callable for the update glossary method over gRPC. + + Updates a Glossary resource. + + Returns: + Callable[[~.UpdateGlossaryRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "update_glossary" not in self._stubs: + self._stubs["update_glossary"] = self._logged_channel.unary_unary( + "/google.cloud.dataplex.v1.BusinessGlossaryService/UpdateGlossary", + request_serializer=business_glossary.UpdateGlossaryRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["update_glossary"] + + @property + def delete_glossary( + self, + ) -> Callable[ + [business_glossary.DeleteGlossaryRequest], Awaitable[operations_pb2.Operation] + ]: + r"""Return a callable for the delete glossary method over gRPC. + + Deletes a Glossary resource. All the categories and + terms within the Glossary must be deleted before the + Glossary can be deleted. 
+ + Returns: + Callable[[~.DeleteGlossaryRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_glossary" not in self._stubs: + self._stubs["delete_glossary"] = self._logged_channel.unary_unary( + "/google.cloud.dataplex.v1.BusinessGlossaryService/DeleteGlossary", + request_serializer=business_glossary.DeleteGlossaryRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["delete_glossary"] + + @property + def get_glossary( + self, + ) -> Callable[ + [business_glossary.GetGlossaryRequest], Awaitable[business_glossary.Glossary] + ]: + r"""Return a callable for the get glossary method over gRPC. + + Gets a Glossary resource. + + Returns: + Callable[[~.GetGlossaryRequest], + Awaitable[~.Glossary]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_glossary" not in self._stubs: + self._stubs["get_glossary"] = self._logged_channel.unary_unary( + "/google.cloud.dataplex.v1.BusinessGlossaryService/GetGlossary", + request_serializer=business_glossary.GetGlossaryRequest.serialize, + response_deserializer=business_glossary.Glossary.deserialize, + ) + return self._stubs["get_glossary"] + + @property + def list_glossaries( + self, + ) -> Callable[ + [business_glossary.ListGlossariesRequest], + Awaitable[business_glossary.ListGlossariesResponse], + ]: + r"""Return a callable for the list glossaries method over gRPC. + + Lists Glossary resources in a project and location. 
+ + Returns: + Callable[[~.ListGlossariesRequest], + Awaitable[~.ListGlossariesResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_glossaries" not in self._stubs: + self._stubs["list_glossaries"] = self._logged_channel.unary_unary( + "/google.cloud.dataplex.v1.BusinessGlossaryService/ListGlossaries", + request_serializer=business_glossary.ListGlossariesRequest.serialize, + response_deserializer=business_glossary.ListGlossariesResponse.deserialize, + ) + return self._stubs["list_glossaries"] + + @property + def create_glossary_category( + self, + ) -> Callable[ + [business_glossary.CreateGlossaryCategoryRequest], + Awaitable[business_glossary.GlossaryCategory], + ]: + r"""Return a callable for the create glossary category method over gRPC. + + Creates a new GlossaryCategory resource. + + Returns: + Callable[[~.CreateGlossaryCategoryRequest], + Awaitable[~.GlossaryCategory]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "create_glossary_category" not in self._stubs: + self._stubs["create_glossary_category"] = self._logged_channel.unary_unary( + "/google.cloud.dataplex.v1.BusinessGlossaryService/CreateGlossaryCategory", + request_serializer=business_glossary.CreateGlossaryCategoryRequest.serialize, + response_deserializer=business_glossary.GlossaryCategory.deserialize, + ) + return self._stubs["create_glossary_category"] + + @property + def update_glossary_category( + self, + ) -> Callable[ + [business_glossary.UpdateGlossaryCategoryRequest], + Awaitable[business_glossary.GlossaryCategory], + ]: + r"""Return a callable for the update glossary category method over gRPC. + + Updates a GlossaryCategory resource. + + Returns: + Callable[[~.UpdateGlossaryCategoryRequest], + Awaitable[~.GlossaryCategory]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "update_glossary_category" not in self._stubs: + self._stubs["update_glossary_category"] = self._logged_channel.unary_unary( + "/google.cloud.dataplex.v1.BusinessGlossaryService/UpdateGlossaryCategory", + request_serializer=business_glossary.UpdateGlossaryCategoryRequest.serialize, + response_deserializer=business_glossary.GlossaryCategory.deserialize, + ) + return self._stubs["update_glossary_category"] + + @property + def delete_glossary_category( + self, + ) -> Callable[ + [business_glossary.DeleteGlossaryCategoryRequest], Awaitable[empty_pb2.Empty] + ]: + r"""Return a callable for the delete glossary category method over gRPC. + + Deletes a GlossaryCategory resource. All the + GlossaryCategories and GlossaryTerms nested directly + under the specified GlossaryCategory will be moved one + level up to the parent in the hierarchy. 
+ + Returns: + Callable[[~.DeleteGlossaryCategoryRequest], + Awaitable[~.Empty]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_glossary_category" not in self._stubs: + self._stubs["delete_glossary_category"] = self._logged_channel.unary_unary( + "/google.cloud.dataplex.v1.BusinessGlossaryService/DeleteGlossaryCategory", + request_serializer=business_glossary.DeleteGlossaryCategoryRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs["delete_glossary_category"] + + @property + def get_glossary_category( + self, + ) -> Callable[ + [business_glossary.GetGlossaryCategoryRequest], + Awaitable[business_glossary.GlossaryCategory], + ]: + r"""Return a callable for the get glossary category method over gRPC. + + Gets a GlossaryCategory resource. + + Returns: + Callable[[~.GetGlossaryCategoryRequest], + Awaitable[~.GlossaryCategory]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "get_glossary_category" not in self._stubs: + self._stubs["get_glossary_category"] = self._logged_channel.unary_unary( + "/google.cloud.dataplex.v1.BusinessGlossaryService/GetGlossaryCategory", + request_serializer=business_glossary.GetGlossaryCategoryRequest.serialize, + response_deserializer=business_glossary.GlossaryCategory.deserialize, + ) + return self._stubs["get_glossary_category"] + + @property + def list_glossary_categories( + self, + ) -> Callable[ + [business_glossary.ListGlossaryCategoriesRequest], + Awaitable[business_glossary.ListGlossaryCategoriesResponse], + ]: + r"""Return a callable for the list glossary categories method over gRPC. + + Lists GlossaryCategory resources in a Glossary. + + Returns: + Callable[[~.ListGlossaryCategoriesRequest], + Awaitable[~.ListGlossaryCategoriesResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_glossary_categories" not in self._stubs: + self._stubs["list_glossary_categories"] = self._logged_channel.unary_unary( + "/google.cloud.dataplex.v1.BusinessGlossaryService/ListGlossaryCategories", + request_serializer=business_glossary.ListGlossaryCategoriesRequest.serialize, + response_deserializer=business_glossary.ListGlossaryCategoriesResponse.deserialize, + ) + return self._stubs["list_glossary_categories"] + + @property + def create_glossary_term( + self, + ) -> Callable[ + [business_glossary.CreateGlossaryTermRequest], + Awaitable[business_glossary.GlossaryTerm], + ]: + r"""Return a callable for the create glossary term method over gRPC. + + Creates a new GlossaryTerm resource. + + Returns: + Callable[[~.CreateGlossaryTermRequest], + Awaitable[~.GlossaryTerm]]: + A function that, when called, will call the underlying RPC + on the server. 
+ """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "create_glossary_term" not in self._stubs: + self._stubs["create_glossary_term"] = self._logged_channel.unary_unary( + "/google.cloud.dataplex.v1.BusinessGlossaryService/CreateGlossaryTerm", + request_serializer=business_glossary.CreateGlossaryTermRequest.serialize, + response_deserializer=business_glossary.GlossaryTerm.deserialize, + ) + return self._stubs["create_glossary_term"] + + @property + def update_glossary_term( + self, + ) -> Callable[ + [business_glossary.UpdateGlossaryTermRequest], + Awaitable[business_glossary.GlossaryTerm], + ]: + r"""Return a callable for the update glossary term method over gRPC. + + Updates a GlossaryTerm resource. + + Returns: + Callable[[~.UpdateGlossaryTermRequest], + Awaitable[~.GlossaryTerm]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "update_glossary_term" not in self._stubs: + self._stubs["update_glossary_term"] = self._logged_channel.unary_unary( + "/google.cloud.dataplex.v1.BusinessGlossaryService/UpdateGlossaryTerm", + request_serializer=business_glossary.UpdateGlossaryTermRequest.serialize, + response_deserializer=business_glossary.GlossaryTerm.deserialize, + ) + return self._stubs["update_glossary_term"] + + @property + def delete_glossary_term( + self, + ) -> Callable[ + [business_glossary.DeleteGlossaryTermRequest], Awaitable[empty_pb2.Empty] + ]: + r"""Return a callable for the delete glossary term method over gRPC. + + Deletes a GlossaryTerm resource. 
+ + Returns: + Callable[[~.DeleteGlossaryTermRequest], + Awaitable[~.Empty]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_glossary_term" not in self._stubs: + self._stubs["delete_glossary_term"] = self._logged_channel.unary_unary( + "/google.cloud.dataplex.v1.BusinessGlossaryService/DeleteGlossaryTerm", + request_serializer=business_glossary.DeleteGlossaryTermRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs["delete_glossary_term"] + + @property + def get_glossary_term( + self, + ) -> Callable[ + [business_glossary.GetGlossaryTermRequest], + Awaitable[business_glossary.GlossaryTerm], + ]: + r"""Return a callable for the get glossary term method over gRPC. + + Gets a GlossaryTerm resource. + + Returns: + Callable[[~.GetGlossaryTermRequest], + Awaitable[~.GlossaryTerm]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_glossary_term" not in self._stubs: + self._stubs["get_glossary_term"] = self._logged_channel.unary_unary( + "/google.cloud.dataplex.v1.BusinessGlossaryService/GetGlossaryTerm", + request_serializer=business_glossary.GetGlossaryTermRequest.serialize, + response_deserializer=business_glossary.GlossaryTerm.deserialize, + ) + return self._stubs["get_glossary_term"] + + @property + def list_glossary_terms( + self, + ) -> Callable[ + [business_glossary.ListGlossaryTermsRequest], + Awaitable[business_glossary.ListGlossaryTermsResponse], + ]: + r"""Return a callable for the list glossary terms method over gRPC. 
+ + Lists GlossaryTerm resources in a Glossary. + + Returns: + Callable[[~.ListGlossaryTermsRequest], + Awaitable[~.ListGlossaryTermsResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_glossary_terms" not in self._stubs: + self._stubs["list_glossary_terms"] = self._logged_channel.unary_unary( + "/google.cloud.dataplex.v1.BusinessGlossaryService/ListGlossaryTerms", + request_serializer=business_glossary.ListGlossaryTermsRequest.serialize, + response_deserializer=business_glossary.ListGlossaryTermsResponse.deserialize, + ) + return self._stubs["list_glossary_terms"] + + def _prep_wrapped_messages(self, client_info): + """Precompute the wrapped methods, overriding the base class method to use async wrappers.""" + self._wrapped_methods = { + self.create_glossary: self._wrap_method( + self.create_glossary, + default_timeout=None, + client_info=client_info, + ), + self.update_glossary: self._wrap_method( + self.update_glossary, + default_timeout=None, + client_info=client_info, + ), + self.delete_glossary: self._wrap_method( + self.delete_glossary, + default_timeout=None, + client_info=client_info, + ), + self.get_glossary: self._wrap_method( + self.get_glossary, + default_timeout=None, + client_info=client_info, + ), + self.list_glossaries: self._wrap_method( + self.list_glossaries, + default_timeout=None, + client_info=client_info, + ), + self.create_glossary_category: self._wrap_method( + self.create_glossary_category, + default_timeout=None, + client_info=client_info, + ), + self.update_glossary_category: self._wrap_method( + self.update_glossary_category, + default_timeout=None, + client_info=client_info, + ), + self.delete_glossary_category: self._wrap_method( + self.delete_glossary_category, + default_timeout=None, + 
client_info=client_info, + ), + self.get_glossary_category: self._wrap_method( + self.get_glossary_category, + default_timeout=None, + client_info=client_info, + ), + self.list_glossary_categories: self._wrap_method( + self.list_glossary_categories, + default_timeout=None, + client_info=client_info, + ), + self.create_glossary_term: self._wrap_method( + self.create_glossary_term, + default_timeout=None, + client_info=client_info, + ), + self.update_glossary_term: self._wrap_method( + self.update_glossary_term, + default_timeout=None, + client_info=client_info, + ), + self.delete_glossary_term: self._wrap_method( + self.delete_glossary_term, + default_timeout=None, + client_info=client_info, + ), + self.get_glossary_term: self._wrap_method( + self.get_glossary_term, + default_timeout=None, + client_info=client_info, + ), + self.list_glossary_terms: self._wrap_method( + self.list_glossary_terms, + default_timeout=None, + client_info=client_info, + ), + self.get_location: self._wrap_method( + self.get_location, + default_timeout=None, + client_info=client_info, + ), + self.list_locations: self._wrap_method( + self.list_locations, + default_timeout=None, + client_info=client_info, + ), + self.cancel_operation: self._wrap_method( + self.cancel_operation, + default_timeout=None, + client_info=client_info, + ), + self.delete_operation: self._wrap_method( + self.delete_operation, + default_timeout=None, + client_info=client_info, + ), + self.get_operation: self._wrap_method( + self.get_operation, + default_timeout=None, + client_info=client_info, + ), + self.list_operations: self._wrap_method( + self.list_operations, + default_timeout=None, + client_info=client_info, + ), + } + + def _wrap_method(self, func, *args, **kwargs): + if self._wrap_with_kind: # pragma: NO COVER + kwargs["kind"] = self.kind + return gapic_v1.method_async.wrap_method(func, *args, **kwargs) + + def close(self): + return self._logged_channel.close() + + @property + def kind(self) -> str: + return 
"grpc_asyncio" + + @property + def delete_operation( + self, + ) -> Callable[[operations_pb2.DeleteOperationRequest], None]: + r"""Return a callable for the delete_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_operation" not in self._stubs: + self._stubs["delete_operation"] = self._logged_channel.unary_unary( + "/google.longrunning.Operations/DeleteOperation", + request_serializer=operations_pb2.DeleteOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["delete_operation"] + + @property + def cancel_operation( + self, + ) -> Callable[[operations_pb2.CancelOperationRequest], None]: + r"""Return a callable for the cancel_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "cancel_operation" not in self._stubs: + self._stubs["cancel_operation"] = self._logged_channel.unary_unary( + "/google.longrunning.Operations/CancelOperation", + request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["cancel_operation"] + + @property + def get_operation( + self, + ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: + r"""Return a callable for the get_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "get_operation" not in self._stubs: + self._stubs["get_operation"] = self._logged_channel.unary_unary( + "/google.longrunning.Operations/GetOperation", + request_serializer=operations_pb2.GetOperationRequest.SerializeToString, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["get_operation"] + + @property + def list_operations( + self, + ) -> Callable[ + [operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse + ]: + r"""Return a callable for the list_operations method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_operations" not in self._stubs: + self._stubs["list_operations"] = self._logged_channel.unary_unary( + "/google.longrunning.Operations/ListOperations", + request_serializer=operations_pb2.ListOperationsRequest.SerializeToString, + response_deserializer=operations_pb2.ListOperationsResponse.FromString, + ) + return self._stubs["list_operations"] + + @property + def list_locations( + self, + ) -> Callable[ + [locations_pb2.ListLocationsRequest], locations_pb2.ListLocationsResponse + ]: + r"""Return a callable for the list locations method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "list_locations" not in self._stubs: + self._stubs["list_locations"] = self._logged_channel.unary_unary( + "/google.cloud.location.Locations/ListLocations", + request_serializer=locations_pb2.ListLocationsRequest.SerializeToString, + response_deserializer=locations_pb2.ListLocationsResponse.FromString, + ) + return self._stubs["list_locations"] + + @property + def get_location( + self, + ) -> Callable[[locations_pb2.GetLocationRequest], locations_pb2.Location]: + r"""Return a callable for the get location method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_location" not in self._stubs: + self._stubs["get_location"] = self._logged_channel.unary_unary( + "/google.cloud.location.Locations/GetLocation", + request_serializer=locations_pb2.GetLocationRequest.SerializeToString, + response_deserializer=locations_pb2.Location.FromString, + ) + return self._stubs["get_location"] + + +__all__ = ("BusinessGlossaryServiceGrpcAsyncIOTransport",) diff --git a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/business_glossary_service/transports/rest.py b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/business_glossary_service/transports/rest.py new file mode 100644 index 000000000000..6ef1c9c485a7 --- /dev/null +++ b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/business_glossary_service/transports/rest.py @@ -0,0 +1,4373 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import dataclasses +import json # type: ignore +import logging +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union +import warnings + +from google.api_core import gapic_v1, operations_v1, rest_helpers, rest_streaming +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.requests import AuthorizedSession # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +import google.protobuf +from google.protobuf import empty_pb2 # type: ignore +from google.protobuf import json_format +from requests import __version__ as requests_version + +from google.cloud.dataplex_v1.types import business_glossary + +from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO +from .rest_base import _BaseBusinessGlossaryServiceRestTransport + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object, None] # type: ignore + +try: + from google.api_core import client_logging # type: ignore + + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER +except ImportError: # pragma: NO COVER + CLIENT_LOGGING_SUPPORTED = False + +_LOGGER = logging.getLogger(__name__) + +DEFAULT_CLIENT_INFO = 
gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + rest_version=f"requests@{requests_version}", +) + +if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER + DEFAULT_CLIENT_INFO.protobuf_runtime_version = google.protobuf.__version__ + + +class BusinessGlossaryServiceRestInterceptor: + """Interceptor for BusinessGlossaryService. + + Interceptors are used to manipulate requests, request metadata, and responses + in arbitrary ways. + Example use cases include: + * Logging + * Verifying requests according to service or custom semantics + * Stripping extraneous information from responses + + These use cases and more can be enabled by injecting an + instance of a custom subclass when constructing the BusinessGlossaryServiceRestTransport. + + .. code-block:: python + class MyCustomBusinessGlossaryServiceInterceptor(BusinessGlossaryServiceRestInterceptor): + def pre_create_glossary(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_create_glossary(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_create_glossary_category(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_create_glossary_category(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_create_glossary_term(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_create_glossary_term(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_delete_glossary(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_delete_glossary(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_delete_glossary_category(self, request, metadata): + 
logging.log(f"Received request: {request}") + return request, metadata + + def pre_delete_glossary_term(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def pre_get_glossary(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_glossary(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_get_glossary_category(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_glossary_category(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_get_glossary_term(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_glossary_term(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list_glossaries(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_glossaries(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list_glossary_categories(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_glossary_categories(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list_glossary_terms(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_glossary_terms(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_update_glossary(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_update_glossary(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_update_glossary_category(self, request, metadata): + logging.log(f"Received 
request: {request}") + return request, metadata + + def post_update_glossary_category(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_update_glossary_term(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_update_glossary_term(self, response): + logging.log(f"Received response: {response}") + return response + + transport = BusinessGlossaryServiceRestTransport(interceptor=MyCustomBusinessGlossaryServiceInterceptor()) + client = BusinessGlossaryServiceClient(transport=transport) + + + """ + + def pre_create_glossary( + self, + request: business_glossary.CreateGlossaryRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + business_glossary.CreateGlossaryRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Pre-rpc interceptor for create_glossary + + Override in a subclass to manipulate the request or metadata + before they are sent to the BusinessGlossaryService server. + """ + return request, metadata + + def post_create_glossary( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for create_glossary + + DEPRECATED. Please use the `post_create_glossary_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the BusinessGlossaryService server but before + it is returned to user code. This `post_create_glossary` interceptor runs + before the `post_create_glossary_with_metadata` interceptor. 
+ """ + return response + + def post_create_glossary_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_glossary + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the BusinessGlossaryService server but before it is returned to user code. + + We recommend only using this `post_create_glossary_with_metadata` + interceptor in new development instead of the `post_create_glossary` interceptor. + When both interceptors are used, this `post_create_glossary_with_metadata` interceptor runs after the + `post_create_glossary` interceptor. The (possibly modified) response returned by + `post_create_glossary` will be passed to + `post_create_glossary_with_metadata`. + """ + return response, metadata + + def pre_create_glossary_category( + self, + request: business_glossary.CreateGlossaryCategoryRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + business_glossary.CreateGlossaryCategoryRequest, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Pre-rpc interceptor for create_glossary_category + + Override in a subclass to manipulate the request or metadata + before they are sent to the BusinessGlossaryService server. + """ + return request, metadata + + def post_create_glossary_category( + self, response: business_glossary.GlossaryCategory + ) -> business_glossary.GlossaryCategory: + """Post-rpc interceptor for create_glossary_category + + DEPRECATED. Please use the `post_create_glossary_category_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the BusinessGlossaryService server but before + it is returned to user code. This `post_create_glossary_category` interceptor runs + before the `post_create_glossary_category_with_metadata` interceptor. 
+ """ + return response + + def post_create_glossary_category_with_metadata( + self, + response: business_glossary.GlossaryCategory, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + business_glossary.GlossaryCategory, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for create_glossary_category + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the BusinessGlossaryService server but before it is returned to user code. + + We recommend only using this `post_create_glossary_category_with_metadata` + interceptor in new development instead of the `post_create_glossary_category` interceptor. + When both interceptors are used, this `post_create_glossary_category_with_metadata` interceptor runs after the + `post_create_glossary_category` interceptor. The (possibly modified) response returned by + `post_create_glossary_category` will be passed to + `post_create_glossary_category_with_metadata`. + """ + return response, metadata + + def pre_create_glossary_term( + self, + request: business_glossary.CreateGlossaryTermRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + business_glossary.CreateGlossaryTermRequest, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Pre-rpc interceptor for create_glossary_term + + Override in a subclass to manipulate the request or metadata + before they are sent to the BusinessGlossaryService server. + """ + return request, metadata + + def post_create_glossary_term( + self, response: business_glossary.GlossaryTerm + ) -> business_glossary.GlossaryTerm: + """Post-rpc interceptor for create_glossary_term + + DEPRECATED. Please use the `post_create_glossary_term_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the BusinessGlossaryService server but before + it is returned to user code. 
This `post_create_glossary_term` interceptor runs + before the `post_create_glossary_term_with_metadata` interceptor. + """ + return response + + def post_create_glossary_term_with_metadata( + self, + response: business_glossary.GlossaryTerm, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[business_glossary.GlossaryTerm, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_glossary_term + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the BusinessGlossaryService server but before it is returned to user code. + + We recommend only using this `post_create_glossary_term_with_metadata` + interceptor in new development instead of the `post_create_glossary_term` interceptor. + When both interceptors are used, this `post_create_glossary_term_with_metadata` interceptor runs after the + `post_create_glossary_term` interceptor. The (possibly modified) response returned by + `post_create_glossary_term` will be passed to + `post_create_glossary_term_with_metadata`. + """ + return response, metadata + + def pre_delete_glossary( + self, + request: business_glossary.DeleteGlossaryRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + business_glossary.DeleteGlossaryRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Pre-rpc interceptor for delete_glossary + + Override in a subclass to manipulate the request or metadata + before they are sent to the BusinessGlossaryService server. + """ + return request, metadata + + def post_delete_glossary( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for delete_glossary + + DEPRECATED. Please use the `post_delete_glossary_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the BusinessGlossaryService server but before + it is returned to user code. 
This `post_delete_glossary` interceptor runs + before the `post_delete_glossary_with_metadata` interceptor. + """ + return response + + def post_delete_glossary_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for delete_glossary + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the BusinessGlossaryService server but before it is returned to user code. + + We recommend only using this `post_delete_glossary_with_metadata` + interceptor in new development instead of the `post_delete_glossary` interceptor. + When both interceptors are used, this `post_delete_glossary_with_metadata` interceptor runs after the + `post_delete_glossary` interceptor. The (possibly modified) response returned by + `post_delete_glossary` will be passed to + `post_delete_glossary_with_metadata`. + """ + return response, metadata + + def pre_delete_glossary_category( + self, + request: business_glossary.DeleteGlossaryCategoryRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + business_glossary.DeleteGlossaryCategoryRequest, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Pre-rpc interceptor for delete_glossary_category + + Override in a subclass to manipulate the request or metadata + before they are sent to the BusinessGlossaryService server. + """ + return request, metadata + + def pre_delete_glossary_term( + self, + request: business_glossary.DeleteGlossaryTermRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + business_glossary.DeleteGlossaryTermRequest, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Pre-rpc interceptor for delete_glossary_term + + Override in a subclass to manipulate the request or metadata + before they are sent to the BusinessGlossaryService server. 
+ """ + return request, metadata + + def pre_get_glossary( + self, + request: business_glossary.GetGlossaryRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + business_glossary.GetGlossaryRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Pre-rpc interceptor for get_glossary + + Override in a subclass to manipulate the request or metadata + before they are sent to the BusinessGlossaryService server. + """ + return request, metadata + + def post_get_glossary( + self, response: business_glossary.Glossary + ) -> business_glossary.Glossary: + """Post-rpc interceptor for get_glossary + + DEPRECATED. Please use the `post_get_glossary_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the BusinessGlossaryService server but before + it is returned to user code. This `post_get_glossary` interceptor runs + before the `post_get_glossary_with_metadata` interceptor. + """ + return response + + def post_get_glossary_with_metadata( + self, + response: business_glossary.Glossary, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[business_glossary.Glossary, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_glossary + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the BusinessGlossaryService server but before it is returned to user code. + + We recommend only using this `post_get_glossary_with_metadata` + interceptor in new development instead of the `post_get_glossary` interceptor. + When both interceptors are used, this `post_get_glossary_with_metadata` interceptor runs after the + `post_get_glossary` interceptor. The (possibly modified) response returned by + `post_get_glossary` will be passed to + `post_get_glossary_with_metadata`. 
+ """ + return response, metadata + + def pre_get_glossary_category( + self, + request: business_glossary.GetGlossaryCategoryRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + business_glossary.GetGlossaryCategoryRequest, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Pre-rpc interceptor for get_glossary_category + + Override in a subclass to manipulate the request or metadata + before they are sent to the BusinessGlossaryService server. + """ + return request, metadata + + def post_get_glossary_category( + self, response: business_glossary.GlossaryCategory + ) -> business_glossary.GlossaryCategory: + """Post-rpc interceptor for get_glossary_category + + DEPRECATED. Please use the `post_get_glossary_category_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the BusinessGlossaryService server but before + it is returned to user code. This `post_get_glossary_category` interceptor runs + before the `post_get_glossary_category_with_metadata` interceptor. + """ + return response + + def post_get_glossary_category_with_metadata( + self, + response: business_glossary.GlossaryCategory, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + business_glossary.GlossaryCategory, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for get_glossary_category + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the BusinessGlossaryService server but before it is returned to user code. + + We recommend only using this `post_get_glossary_category_with_metadata` + interceptor in new development instead of the `post_get_glossary_category` interceptor. + When both interceptors are used, this `post_get_glossary_category_with_metadata` interceptor runs after the + `post_get_glossary_category` interceptor. 
The (possibly modified) response returned by + `post_get_glossary_category` will be passed to + `post_get_glossary_category_with_metadata`. + """ + return response, metadata + + def pre_get_glossary_term( + self, + request: business_glossary.GetGlossaryTermRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + business_glossary.GetGlossaryTermRequest, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Pre-rpc interceptor for get_glossary_term + + Override in a subclass to manipulate the request or metadata + before they are sent to the BusinessGlossaryService server. + """ + return request, metadata + + def post_get_glossary_term( + self, response: business_glossary.GlossaryTerm + ) -> business_glossary.GlossaryTerm: + """Post-rpc interceptor for get_glossary_term + + DEPRECATED. Please use the `post_get_glossary_term_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the BusinessGlossaryService server but before + it is returned to user code. This `post_get_glossary_term` interceptor runs + before the `post_get_glossary_term_with_metadata` interceptor. + """ + return response + + def post_get_glossary_term_with_metadata( + self, + response: business_glossary.GlossaryTerm, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[business_glossary.GlossaryTerm, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_glossary_term + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the BusinessGlossaryService server but before it is returned to user code. + + We recommend only using this `post_get_glossary_term_with_metadata` + interceptor in new development instead of the `post_get_glossary_term` interceptor. + When both interceptors are used, this `post_get_glossary_term_with_metadata` interceptor runs after the + `post_get_glossary_term` interceptor. 
The (possibly modified) response returned by + `post_get_glossary_term` will be passed to + `post_get_glossary_term_with_metadata`. + """ + return response, metadata + + def pre_list_glossaries( + self, + request: business_glossary.ListGlossariesRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + business_glossary.ListGlossariesRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Pre-rpc interceptor for list_glossaries + + Override in a subclass to manipulate the request or metadata + before they are sent to the BusinessGlossaryService server. + """ + return request, metadata + + def post_list_glossaries( + self, response: business_glossary.ListGlossariesResponse + ) -> business_glossary.ListGlossariesResponse: + """Post-rpc interceptor for list_glossaries + + DEPRECATED. Please use the `post_list_glossaries_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the BusinessGlossaryService server but before + it is returned to user code. This `post_list_glossaries` interceptor runs + before the `post_list_glossaries_with_metadata` interceptor. + """ + return response + + def post_list_glossaries_with_metadata( + self, + response: business_glossary.ListGlossariesResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + business_glossary.ListGlossariesResponse, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for list_glossaries + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the BusinessGlossaryService server but before it is returned to user code. + + We recommend only using this `post_list_glossaries_with_metadata` + interceptor in new development instead of the `post_list_glossaries` interceptor. + When both interceptors are used, this `post_list_glossaries_with_metadata` interceptor runs after the + `post_list_glossaries` interceptor. 
The (possibly modified) response returned by + `post_list_glossaries` will be passed to + `post_list_glossaries_with_metadata`. + """ + return response, metadata + + def pre_list_glossary_categories( + self, + request: business_glossary.ListGlossaryCategoriesRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + business_glossary.ListGlossaryCategoriesRequest, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Pre-rpc interceptor for list_glossary_categories + + Override in a subclass to manipulate the request or metadata + before they are sent to the BusinessGlossaryService server. + """ + return request, metadata + + def post_list_glossary_categories( + self, response: business_glossary.ListGlossaryCategoriesResponse + ) -> business_glossary.ListGlossaryCategoriesResponse: + """Post-rpc interceptor for list_glossary_categories + + DEPRECATED. Please use the `post_list_glossary_categories_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the BusinessGlossaryService server but before + it is returned to user code. This `post_list_glossary_categories` interceptor runs + before the `post_list_glossary_categories_with_metadata` interceptor. + """ + return response + + def post_list_glossary_categories_with_metadata( + self, + response: business_glossary.ListGlossaryCategoriesResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + business_glossary.ListGlossaryCategoriesResponse, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for list_glossary_categories + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the BusinessGlossaryService server but before it is returned to user code. + + We recommend only using this `post_list_glossary_categories_with_metadata` + interceptor in new development instead of the `post_list_glossary_categories` interceptor. 
+ When both interceptors are used, this `post_list_glossary_categories_with_metadata` interceptor runs after the + `post_list_glossary_categories` interceptor. The (possibly modified) response returned by + `post_list_glossary_categories` will be passed to + `post_list_glossary_categories_with_metadata`. + """ + return response, metadata + + def pre_list_glossary_terms( + self, + request: business_glossary.ListGlossaryTermsRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + business_glossary.ListGlossaryTermsRequest, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Pre-rpc interceptor for list_glossary_terms + + Override in a subclass to manipulate the request or metadata + before they are sent to the BusinessGlossaryService server. + """ + return request, metadata + + def post_list_glossary_terms( + self, response: business_glossary.ListGlossaryTermsResponse + ) -> business_glossary.ListGlossaryTermsResponse: + """Post-rpc interceptor for list_glossary_terms + + DEPRECATED. Please use the `post_list_glossary_terms_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the BusinessGlossaryService server but before + it is returned to user code. This `post_list_glossary_terms` interceptor runs + before the `post_list_glossary_terms_with_metadata` interceptor. + """ + return response + + def post_list_glossary_terms_with_metadata( + self, + response: business_glossary.ListGlossaryTermsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + business_glossary.ListGlossaryTermsResponse, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for list_glossary_terms + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the BusinessGlossaryService server but before it is returned to user code. 
+ + We recommend only using this `post_list_glossary_terms_with_metadata` + interceptor in new development instead of the `post_list_glossary_terms` interceptor. + When both interceptors are used, this `post_list_glossary_terms_with_metadata` interceptor runs after the + `post_list_glossary_terms` interceptor. The (possibly modified) response returned by + `post_list_glossary_terms` will be passed to + `post_list_glossary_terms_with_metadata`. + """ + return response, metadata + + def pre_update_glossary( + self, + request: business_glossary.UpdateGlossaryRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + business_glossary.UpdateGlossaryRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Pre-rpc interceptor for update_glossary + + Override in a subclass to manipulate the request or metadata + before they are sent to the BusinessGlossaryService server. + """ + return request, metadata + + def post_update_glossary( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for update_glossary + + DEPRECATED. Please use the `post_update_glossary_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the BusinessGlossaryService server but before + it is returned to user code. This `post_update_glossary` interceptor runs + before the `post_update_glossary_with_metadata` interceptor. + """ + return response + + def post_update_glossary_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update_glossary + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the BusinessGlossaryService server but before it is returned to user code. 
+ + We recommend only using this `post_update_glossary_with_metadata` + interceptor in new development instead of the `post_update_glossary` interceptor. + When both interceptors are used, this `post_update_glossary_with_metadata` interceptor runs after the + `post_update_glossary` interceptor. The (possibly modified) response returned by + `post_update_glossary` will be passed to + `post_update_glossary_with_metadata`. + """ + return response, metadata + + def pre_update_glossary_category( + self, + request: business_glossary.UpdateGlossaryCategoryRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + business_glossary.UpdateGlossaryCategoryRequest, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Pre-rpc interceptor for update_glossary_category + + Override in a subclass to manipulate the request or metadata + before they are sent to the BusinessGlossaryService server. + """ + return request, metadata + + def post_update_glossary_category( + self, response: business_glossary.GlossaryCategory + ) -> business_glossary.GlossaryCategory: + """Post-rpc interceptor for update_glossary_category + + DEPRECATED. Please use the `post_update_glossary_category_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the BusinessGlossaryService server but before + it is returned to user code. This `post_update_glossary_category` interceptor runs + before the `post_update_glossary_category_with_metadata` interceptor. 
+ """ + return response + + def post_update_glossary_category_with_metadata( + self, + response: business_glossary.GlossaryCategory, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + business_glossary.GlossaryCategory, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for update_glossary_category + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the BusinessGlossaryService server but before it is returned to user code. + + We recommend only using this `post_update_glossary_category_with_metadata` + interceptor in new development instead of the `post_update_glossary_category` interceptor. + When both interceptors are used, this `post_update_glossary_category_with_metadata` interceptor runs after the + `post_update_glossary_category` interceptor. The (possibly modified) response returned by + `post_update_glossary_category` will be passed to + `post_update_glossary_category_with_metadata`. + """ + return response, metadata + + def pre_update_glossary_term( + self, + request: business_glossary.UpdateGlossaryTermRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + business_glossary.UpdateGlossaryTermRequest, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Pre-rpc interceptor for update_glossary_term + + Override in a subclass to manipulate the request or metadata + before they are sent to the BusinessGlossaryService server. + """ + return request, metadata + + def post_update_glossary_term( + self, response: business_glossary.GlossaryTerm + ) -> business_glossary.GlossaryTerm: + """Post-rpc interceptor for update_glossary_term + + DEPRECATED. Please use the `post_update_glossary_term_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the BusinessGlossaryService server but before + it is returned to user code. 
This `post_update_glossary_term` interceptor runs + before the `post_update_glossary_term_with_metadata` interceptor. + """ + return response + + def post_update_glossary_term_with_metadata( + self, + response: business_glossary.GlossaryTerm, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[business_glossary.GlossaryTerm, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update_glossary_term + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the BusinessGlossaryService server but before it is returned to user code. + + We recommend only using this `post_update_glossary_term_with_metadata` + interceptor in new development instead of the `post_update_glossary_term` interceptor. + When both interceptors are used, this `post_update_glossary_term_with_metadata` interceptor runs after the + `post_update_glossary_term` interceptor. The (possibly modified) response returned by + `post_update_glossary_term` will be passed to + `post_update_glossary_term_with_metadata`. + """ + return response, metadata + + def pre_get_location( + self, + request: locations_pb2.GetLocationRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + locations_pb2.GetLocationRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Pre-rpc interceptor for get_location + + Override in a subclass to manipulate the request or metadata + before they are sent to the BusinessGlossaryService server. + """ + return request, metadata + + def post_get_location( + self, response: locations_pb2.Location + ) -> locations_pb2.Location: + """Post-rpc interceptor for get_location + + Override in a subclass to manipulate the response + after it is returned by the BusinessGlossaryService server but before + it is returned to user code. 
+ """ + return response + + def pre_list_locations( + self, + request: locations_pb2.ListLocationsRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + locations_pb2.ListLocationsRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Pre-rpc interceptor for list_locations + + Override in a subclass to manipulate the request or metadata + before they are sent to the BusinessGlossaryService server. + """ + return request, metadata + + def post_list_locations( + self, response: locations_pb2.ListLocationsResponse + ) -> locations_pb2.ListLocationsResponse: + """Post-rpc interceptor for list_locations + + Override in a subclass to manipulate the response + after it is returned by the BusinessGlossaryService server but before + it is returned to user code. + """ + return response + + def pre_cancel_operation( + self, + request: operations_pb2.CancelOperationRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + operations_pb2.CancelOperationRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Pre-rpc interceptor for cancel_operation + + Override in a subclass to manipulate the request or metadata + before they are sent to the BusinessGlossaryService server. + """ + return request, metadata + + def post_cancel_operation(self, response: None) -> None: + """Post-rpc interceptor for cancel_operation + + Override in a subclass to manipulate the response + after it is returned by the BusinessGlossaryService server but before + it is returned to user code. + """ + return response + + def pre_delete_operation( + self, + request: operations_pb2.DeleteOperationRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + operations_pb2.DeleteOperationRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Pre-rpc interceptor for delete_operation + + Override in a subclass to manipulate the request or metadata + before they are sent to the BusinessGlossaryService server. 
+ """ + return request, metadata + + def post_delete_operation(self, response: None) -> None: + """Post-rpc interceptor for delete_operation + + Override in a subclass to manipulate the response + after it is returned by the BusinessGlossaryService server but before + it is returned to user code. + """ + return response + + def pre_get_operation( + self, + request: operations_pb2.GetOperationRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + operations_pb2.GetOperationRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Pre-rpc interceptor for get_operation + + Override in a subclass to manipulate the request or metadata + before they are sent to the BusinessGlossaryService server. + """ + return request, metadata + + def post_get_operation( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for get_operation + + Override in a subclass to manipulate the response + after it is returned by the BusinessGlossaryService server but before + it is returned to user code. + """ + return response + + def pre_list_operations( + self, + request: operations_pb2.ListOperationsRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + operations_pb2.ListOperationsRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Pre-rpc interceptor for list_operations + + Override in a subclass to manipulate the request or metadata + before they are sent to the BusinessGlossaryService server. + """ + return request, metadata + + def post_list_operations( + self, response: operations_pb2.ListOperationsResponse + ) -> operations_pb2.ListOperationsResponse: + """Post-rpc interceptor for list_operations + + Override in a subclass to manipulate the response + after it is returned by the BusinessGlossaryService server but before + it is returned to user code. 
+ """ + return response + + +@dataclasses.dataclass +class BusinessGlossaryServiceRestStub: + _session: AuthorizedSession + _host: str + _interceptor: BusinessGlossaryServiceRestInterceptor + + +class BusinessGlossaryServiceRestTransport(_BaseBusinessGlossaryServiceRestTransport): + """REST backend synchronous transport for BusinessGlossaryService. + + BusinessGlossaryService provides APIs for managing business + glossary resources for enterprise customers. + The resources currently supported in Business Glossary are: + + 1. Glossary + 2. GlossaryCategory + 3. GlossaryTerm + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends JSON representations of protocol buffers over HTTP/1.1 + """ + + def __init__( + self, + *, + host: str = "dataplex.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = "https", + interceptor: Optional[BusinessGlossaryServiceRestInterceptor] = None, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'dataplex.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. 
+ This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client + certificate to configure mutual TLS HTTP channel. It is ignored + if ``channel`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you are developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. + """ + # Run the base constructor + # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. + # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the + # credentials object + super().__init__( + host=host, + credentials=credentials, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + url_scheme=url_scheme, + api_audience=api_audience, + ) + self._session = AuthorizedSession( + self._credentials, default_host=self.DEFAULT_HOST + ) + self._operations_client: Optional[operations_v1.AbstractOperationsClient] = None + if client_cert_source_for_mtls: + self._session.configure_mtls_channel(client_cert_source_for_mtls) + self._interceptor = interceptor or BusinessGlossaryServiceRestInterceptor() + self._prep_wrapped_messages(client_info) + + @property + def operations_client(self) -> operations_v1.AbstractOperationsClient: + """Create the client designed to process long-running operations. + + This property caches on the instance; repeated calls return the same + client. 
+ """ + # Only create a new client if we do not already have one. + if self._operations_client is None: + http_options: Dict[str, List[Dict[str, str]]] = { + "google.longrunning.Operations.CancelOperation": [ + { + "method": "post", + "uri": "/v1/{name=projects/*/locations/*/operations/*}:cancel", + "body": "*", + }, + { + "method": "post", + "uri": "/v1/{name=organizations/*/locations/*/operations/*}:cancel", + "body": "*", + }, + ], + "google.longrunning.Operations.DeleteOperation": [ + { + "method": "delete", + "uri": "/v1/{name=projects/*/locations/*/operations/*}", + }, + { + "method": "delete", + "uri": "/v1/{name=organizations/*/locations/*/operations/*}", + }, + ], + "google.longrunning.Operations.GetOperation": [ + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1/{name=organizations/*/locations/*/operations/*}", + }, + ], + "google.longrunning.Operations.ListOperations": [ + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*}/operations", + }, + { + "method": "get", + "uri": "/v1/{name=organizations/*/locations/*}/operations", + }, + ], + } + + rest_transport = operations_v1.OperationsRestTransport( + host=self._host, + # use the credentials which are saved + credentials=self._credentials, + scopes=self._scopes, + http_options=http_options, + path_prefix="v1", + ) + + self._operations_client = operations_v1.AbstractOperationsClient( + transport=rest_transport + ) + + # Return the client from cache. 
+ return self._operations_client + + class _CreateGlossary( + _BaseBusinessGlossaryServiceRestTransport._BaseCreateGlossary, + BusinessGlossaryServiceRestStub, + ): + def __hash__(self): + return hash("BusinessGlossaryServiceRestTransport.CreateGlossary") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response + + def __call__( + self, + request: business_glossary.CreateGlossaryRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operations_pb2.Operation: + r"""Call the create glossary method over HTTP. + + Args: + request (~.business_glossary.CreateGlossaryRequest): + The request object. Create Glossary Request + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options = ( + _BaseBusinessGlossaryServiceRestTransport._BaseCreateGlossary._get_http_options() + ) + + request, metadata = self._interceptor.pre_create_glossary(request, metadata) + transcoded_request = _BaseBusinessGlossaryServiceRestTransport._BaseCreateGlossary._get_transcoded_request( + http_options, request + ) + + body = _BaseBusinessGlossaryServiceRestTransport._BaseCreateGlossary._get_request_body_json( + transcoded_request + ) + + # Jsonify the query params + query_params = _BaseBusinessGlossaryServiceRestTransport._BaseCreateGlossary._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.dataplex_v1.BusinessGlossaryServiceClient.CreateGlossary", + extra={ + "serviceName": "google.cloud.dataplex.v1.BusinessGlossaryService", + "rpcName": "CreateGlossary", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = ( + BusinessGlossaryServiceRestTransport._CreateGlossary._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + body, + ) + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_create_glossary(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_glossary_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.dataplex_v1.BusinessGlossaryServiceClient.create_glossary", + extra={ + "serviceName": "google.cloud.dataplex.v1.BusinessGlossaryService", + "rpcName": "CreateGlossary", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _CreateGlossaryCategory( + _BaseBusinessGlossaryServiceRestTransport._BaseCreateGlossaryCategory, + BusinessGlossaryServiceRestStub, + ): + def __hash__(self): + return hash("BusinessGlossaryServiceRestTransport.CreateGlossaryCategory") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response + + def __call__( + self, + request: business_glossary.CreateGlossaryCategoryRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: 
Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> business_glossary.GlossaryCategory: + r"""Call the create glossary category method over HTTP. + + Args: + request (~.business_glossary.CreateGlossaryCategoryRequest): + The request object. Creates a new GlossaryCategory under + the specified Glossary. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.business_glossary.GlossaryCategory: + A GlossaryCategory represents a + collection of GlossaryCategories and + GlossaryTerms within a Glossary that are + related to each other. + + """ + + http_options = ( + _BaseBusinessGlossaryServiceRestTransport._BaseCreateGlossaryCategory._get_http_options() + ) + + request, metadata = self._interceptor.pre_create_glossary_category( + request, metadata + ) + transcoded_request = _BaseBusinessGlossaryServiceRestTransport._BaseCreateGlossaryCategory._get_transcoded_request( + http_options, request + ) + + body = _BaseBusinessGlossaryServiceRestTransport._BaseCreateGlossaryCategory._get_request_body_json( + transcoded_request + ) + + # Jsonify the query params + query_params = _BaseBusinessGlossaryServiceRestTransport._BaseCreateGlossaryCategory._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + 
"requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.dataplex_v1.BusinessGlossaryServiceClient.CreateGlossaryCategory", + extra={ + "serviceName": "google.cloud.dataplex.v1.BusinessGlossaryService", + "rpcName": "CreateGlossaryCategory", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = BusinessGlossaryServiceRestTransport._CreateGlossaryCategory._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = business_glossary.GlossaryCategory() + pb_resp = business_glossary.GlossaryCategory.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_create_glossary_category(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_glossary_category_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = business_glossary.GlossaryCategory.to_json( + response + ) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.dataplex_v1.BusinessGlossaryServiceClient.create_glossary_category", + extra={ + "serviceName": "google.cloud.dataplex.v1.BusinessGlossaryService", + "rpcName": "CreateGlossaryCategory", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _CreateGlossaryTerm( + 
_BaseBusinessGlossaryServiceRestTransport._BaseCreateGlossaryTerm, + BusinessGlossaryServiceRestStub, + ): + def __hash__(self): + return hash("BusinessGlossaryServiceRestTransport.CreateGlossaryTerm") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response + + def __call__( + self, + request: business_glossary.CreateGlossaryTermRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> business_glossary.GlossaryTerm: + r"""Call the create glossary term method over HTTP. + + Args: + request (~.business_glossary.CreateGlossaryTermRequest): + The request object. Creates a new GlossaryTerm under the + specified Glossary. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.business_glossary.GlossaryTerm: + GlossaryTerms are the core of + Glossary. A GlossaryTerm holds a rich + text description that can be attached to + Entries or specific columns to enrich + them. 
+ + """ + + http_options = ( + _BaseBusinessGlossaryServiceRestTransport._BaseCreateGlossaryTerm._get_http_options() + ) + + request, metadata = self._interceptor.pre_create_glossary_term( + request, metadata + ) + transcoded_request = _BaseBusinessGlossaryServiceRestTransport._BaseCreateGlossaryTerm._get_transcoded_request( + http_options, request + ) + + body = _BaseBusinessGlossaryServiceRestTransport._BaseCreateGlossaryTerm._get_request_body_json( + transcoded_request + ) + + # Jsonify the query params + query_params = _BaseBusinessGlossaryServiceRestTransport._BaseCreateGlossaryTerm._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.dataplex_v1.BusinessGlossaryServiceClient.CreateGlossaryTerm", + extra={ + "serviceName": "google.cloud.dataplex.v1.BusinessGlossaryService", + "rpcName": "CreateGlossaryTerm", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = ( + BusinessGlossaryServiceRestTransport._CreateGlossaryTerm._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + body, + ) + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = business_glossary.GlossaryTerm() + pb_resp = business_glossary.GlossaryTerm.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_create_glossary_term(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_glossary_term_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = business_glossary.GlossaryTerm.to_json(response) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.dataplex_v1.BusinessGlossaryServiceClient.create_glossary_term", + extra={ + "serviceName": "google.cloud.dataplex.v1.BusinessGlossaryService", + "rpcName": "CreateGlossaryTerm", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _DeleteGlossary( + _BaseBusinessGlossaryServiceRestTransport._BaseDeleteGlossary, + BusinessGlossaryServiceRestStub, + ): + def __hash__(self): + return hash("BusinessGlossaryServiceRestTransport.DeleteGlossary") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__( + self, + request: business_glossary.DeleteGlossaryRequest, + *, + retry: 
OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operations_pb2.Operation: + r"""Call the delete glossary method over HTTP. + + Args: + request (~.business_glossary.DeleteGlossaryRequest): + The request object. Delete Glossary Request + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. + + """ + + http_options = ( + _BaseBusinessGlossaryServiceRestTransport._BaseDeleteGlossary._get_http_options() + ) + + request, metadata = self._interceptor.pre_delete_glossary(request, metadata) + transcoded_request = _BaseBusinessGlossaryServiceRestTransport._BaseDeleteGlossary._get_transcoded_request( + http_options, request + ) + + # Jsonify the query params + query_params = _BaseBusinessGlossaryServiceRestTransport._BaseDeleteGlossary._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.dataplex_v1.BusinessGlossaryServiceClient.DeleteGlossary", + extra={ + "serviceName": 
"google.cloud.dataplex.v1.BusinessGlossaryService", + "rpcName": "DeleteGlossary", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = ( + BusinessGlossaryServiceRestTransport._DeleteGlossary._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_delete_glossary(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_delete_glossary_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.dataplex_v1.BusinessGlossaryServiceClient.delete_glossary", + extra={ + "serviceName": "google.cloud.dataplex.v1.BusinessGlossaryService", + "rpcName": "DeleteGlossary", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _DeleteGlossaryCategory( + _BaseBusinessGlossaryServiceRestTransport._BaseDeleteGlossaryCategory, + BusinessGlossaryServiceRestStub, + ): + def __hash__(self): + return hash("BusinessGlossaryServiceRestTransport.DeleteGlossaryCategory") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = 
transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__( + self, + request: business_glossary.DeleteGlossaryCategoryRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ): + r"""Call the delete glossary category method over HTTP. + + Args: + request (~.business_glossary.DeleteGlossaryCategoryRequest): + The request object. Delete GlossaryCategory Request + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
+ """ + + http_options = ( + _BaseBusinessGlossaryServiceRestTransport._BaseDeleteGlossaryCategory._get_http_options() + ) + + request, metadata = self._interceptor.pre_delete_glossary_category( + request, metadata + ) + transcoded_request = _BaseBusinessGlossaryServiceRestTransport._BaseDeleteGlossaryCategory._get_transcoded_request( + http_options, request + ) + + # Jsonify the query params + query_params = _BaseBusinessGlossaryServiceRestTransport._BaseDeleteGlossaryCategory._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.dataplex_v1.BusinessGlossaryServiceClient.DeleteGlossaryCategory", + extra={ + "serviceName": "google.cloud.dataplex.v1.BusinessGlossaryService", + "rpcName": "DeleteGlossaryCategory", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = BusinessGlossaryServiceRestTransport._DeleteGlossaryCategory._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + class _DeleteGlossaryTerm( + _BaseBusinessGlossaryServiceRestTransport._BaseDeleteGlossaryTerm, + BusinessGlossaryServiceRestStub, + ): + def __hash__(self): + return hash("BusinessGlossaryServiceRestTransport.DeleteGlossaryTerm") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__( + self, + request: business_glossary.DeleteGlossaryTermRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ): + r"""Call the delete glossary term method over HTTP. + + Args: + request (~.business_glossary.DeleteGlossaryTermRequest): + The request object. Delete GlossaryTerm Request + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
+ """ + + http_options = ( + _BaseBusinessGlossaryServiceRestTransport._BaseDeleteGlossaryTerm._get_http_options() + ) + + request, metadata = self._interceptor.pre_delete_glossary_term( + request, metadata + ) + transcoded_request = _BaseBusinessGlossaryServiceRestTransport._BaseDeleteGlossaryTerm._get_transcoded_request( + http_options, request + ) + + # Jsonify the query params + query_params = _BaseBusinessGlossaryServiceRestTransport._BaseDeleteGlossaryTerm._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.dataplex_v1.BusinessGlossaryServiceClient.DeleteGlossaryTerm", + extra={ + "serviceName": "google.cloud.dataplex.v1.BusinessGlossaryService", + "rpcName": "DeleteGlossaryTerm", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = ( + BusinessGlossaryServiceRestTransport._DeleteGlossaryTerm._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + class _GetGlossary( + _BaseBusinessGlossaryServiceRestTransport._BaseGetGlossary, + BusinessGlossaryServiceRestStub, + ): + def __hash__(self): + return hash("BusinessGlossaryServiceRestTransport.GetGlossary") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__( + self, + request: business_glossary.GetGlossaryRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> business_glossary.Glossary: + r"""Call the get glossary method over HTTP. + + Args: + request (~.business_glossary.GetGlossaryRequest): + The request object. Get Glossary Request + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.business_glossary.Glossary: + A Glossary represents a collection of + GlossaryCategories and GlossaryTerms + defined by the user. Glossary is a top + level resource and is the Google Cloud + parent resource of all the + GlossaryCategories and GlossaryTerms + within it. 
+ + """ + + http_options = ( + _BaseBusinessGlossaryServiceRestTransport._BaseGetGlossary._get_http_options() + ) + + request, metadata = self._interceptor.pre_get_glossary(request, metadata) + transcoded_request = _BaseBusinessGlossaryServiceRestTransport._BaseGetGlossary._get_transcoded_request( + http_options, request + ) + + # Jsonify the query params + query_params = _BaseBusinessGlossaryServiceRestTransport._BaseGetGlossary._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.dataplex_v1.BusinessGlossaryServiceClient.GetGlossary", + extra={ + "serviceName": "google.cloud.dataplex.v1.BusinessGlossaryService", + "rpcName": "GetGlossary", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = BusinessGlossaryServiceRestTransport._GetGlossary._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = business_glossary.Glossary() + pb_resp = business_glossary.Glossary.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_get_glossary(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_glossary_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = business_glossary.Glossary.to_json(response) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.dataplex_v1.BusinessGlossaryServiceClient.get_glossary", + extra={ + "serviceName": "google.cloud.dataplex.v1.BusinessGlossaryService", + "rpcName": "GetGlossary", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _GetGlossaryCategory( + _BaseBusinessGlossaryServiceRestTransport._BaseGetGlossaryCategory, + BusinessGlossaryServiceRestStub, + ): + def __hash__(self): + return hash("BusinessGlossaryServiceRestTransport.GetGlossaryCategory") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__( + self, + request: business_glossary.GetGlossaryCategoryRequest, + *, + retry: OptionalRetry = 
gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> business_glossary.GlossaryCategory: + r"""Call the get glossary category method over HTTP. + + Args: + request (~.business_glossary.GetGlossaryCategoryRequest): + The request object. Get GlossaryCategory Request + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.business_glossary.GlossaryCategory: + A GlossaryCategory represents a + collection of GlossaryCategories and + GlossaryTerms within a Glossary that are + related to each other. + + """ + + http_options = ( + _BaseBusinessGlossaryServiceRestTransport._BaseGetGlossaryCategory._get_http_options() + ) + + request, metadata = self._interceptor.pre_get_glossary_category( + request, metadata + ) + transcoded_request = _BaseBusinessGlossaryServiceRestTransport._BaseGetGlossaryCategory._get_transcoded_request( + http_options, request + ) + + # Jsonify the query params + query_params = _BaseBusinessGlossaryServiceRestTransport._BaseGetGlossaryCategory._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for 
google.cloud.dataplex_v1.BusinessGlossaryServiceClient.GetGlossaryCategory", + extra={ + "serviceName": "google.cloud.dataplex.v1.BusinessGlossaryService", + "rpcName": "GetGlossaryCategory", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = ( + BusinessGlossaryServiceRestTransport._GetGlossaryCategory._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = business_glossary.GlossaryCategory() + pb_resp = business_glossary.GlossaryCategory.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_get_glossary_category(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_glossary_category_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = business_glossary.GlossaryCategory.to_json( + response + ) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.dataplex_v1.BusinessGlossaryServiceClient.get_glossary_category", + extra={ + "serviceName": "google.cloud.dataplex.v1.BusinessGlossaryService", + "rpcName": "GetGlossaryCategory", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _GetGlossaryTerm( + _BaseBusinessGlossaryServiceRestTransport._BaseGetGlossaryTerm, + BusinessGlossaryServiceRestStub, + ): + def __hash__(self): + return 
hash("BusinessGlossaryServiceRestTransport.GetGlossaryTerm") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__( + self, + request: business_glossary.GetGlossaryTermRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> business_glossary.GlossaryTerm: + r"""Call the get glossary term method over HTTP. + + Args: + request (~.business_glossary.GetGlossaryTermRequest): + The request object. Get GlossaryTerm Request + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.business_glossary.GlossaryTerm: + GlossaryTerms are the core of + Glossary. A GlossaryTerm holds a rich + text description that can be attached to + Entries or specific columns to enrich + them. 
+ + """ + + http_options = ( + _BaseBusinessGlossaryServiceRestTransport._BaseGetGlossaryTerm._get_http_options() + ) + + request, metadata = self._interceptor.pre_get_glossary_term( + request, metadata + ) + transcoded_request = _BaseBusinessGlossaryServiceRestTransport._BaseGetGlossaryTerm._get_transcoded_request( + http_options, request + ) + + # Jsonify the query params + query_params = _BaseBusinessGlossaryServiceRestTransport._BaseGetGlossaryTerm._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.dataplex_v1.BusinessGlossaryServiceClient.GetGlossaryTerm", + extra={ + "serviceName": "google.cloud.dataplex.v1.BusinessGlossaryService", + "rpcName": "GetGlossaryTerm", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = ( + BusinessGlossaryServiceRestTransport._GetGlossaryTerm._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = business_glossary.GlossaryTerm() + pb_resp = business_glossary.GlossaryTerm.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_get_glossary_term(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_glossary_term_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = business_glossary.GlossaryTerm.to_json(response) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.dataplex_v1.BusinessGlossaryServiceClient.get_glossary_term", + extra={ + "serviceName": "google.cloud.dataplex.v1.BusinessGlossaryService", + "rpcName": "GetGlossaryTerm", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _ListGlossaries( + _BaseBusinessGlossaryServiceRestTransport._BaseListGlossaries, + BusinessGlossaryServiceRestStub, + ): + def __hash__(self): + return hash("BusinessGlossaryServiceRestTransport.ListGlossaries") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__( + self, + request: business_glossary.ListGlossariesRequest, + *, + retry: OptionalRetry 
= gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> business_glossary.ListGlossariesResponse: + r"""Call the list glossaries method over HTTP. + + Args: + request (~.business_glossary.ListGlossariesRequest): + The request object. List Glossaries Request + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.business_glossary.ListGlossariesResponse: + List Glossaries Response + """ + + http_options = ( + _BaseBusinessGlossaryServiceRestTransport._BaseListGlossaries._get_http_options() + ) + + request, metadata = self._interceptor.pre_list_glossaries(request, metadata) + transcoded_request = _BaseBusinessGlossaryServiceRestTransport._BaseListGlossaries._get_transcoded_request( + http_options, request + ) + + # Jsonify the query params + query_params = _BaseBusinessGlossaryServiceRestTransport._BaseListGlossaries._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.dataplex_v1.BusinessGlossaryServiceClient.ListGlossaries", + extra={ + "serviceName": "google.cloud.dataplex.v1.BusinessGlossaryService", + "rpcName": 
"ListGlossaries", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = ( + BusinessGlossaryServiceRestTransport._ListGlossaries._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = business_glossary.ListGlossariesResponse() + pb_resp = business_glossary.ListGlossariesResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_list_glossaries(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_glossaries_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = business_glossary.ListGlossariesResponse.to_json( + response + ) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.dataplex_v1.BusinessGlossaryServiceClient.list_glossaries", + extra={ + "serviceName": "google.cloud.dataplex.v1.BusinessGlossaryService", + "rpcName": "ListGlossaries", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _ListGlossaryCategories( + _BaseBusinessGlossaryServiceRestTransport._BaseListGlossaryCategories, + BusinessGlossaryServiceRestStub, + ): + def __hash__(self): + return hash("BusinessGlossaryServiceRestTransport.ListGlossaryCategories") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + 
uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__( + self, + request: business_glossary.ListGlossaryCategoriesRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> business_glossary.ListGlossaryCategoriesResponse: + r"""Call the list glossary categories method over HTTP. + + Args: + request (~.business_glossary.ListGlossaryCategoriesRequest): + The request object. List GlossaryCategories Request + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
+ + Returns: + ~.business_glossary.ListGlossaryCategoriesResponse: + List GlossaryCategories Response + """ + + http_options = ( + _BaseBusinessGlossaryServiceRestTransport._BaseListGlossaryCategories._get_http_options() + ) + + request, metadata = self._interceptor.pre_list_glossary_categories( + request, metadata + ) + transcoded_request = _BaseBusinessGlossaryServiceRestTransport._BaseListGlossaryCategories._get_transcoded_request( + http_options, request + ) + + # Jsonify the query params + query_params = _BaseBusinessGlossaryServiceRestTransport._BaseListGlossaryCategories._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.dataplex_v1.BusinessGlossaryServiceClient.ListGlossaryCategories", + extra={ + "serviceName": "google.cloud.dataplex.v1.BusinessGlossaryService", + "rpcName": "ListGlossaryCategories", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = BusinessGlossaryServiceRestTransport._ListGlossaryCategories._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = business_glossary.ListGlossaryCategoriesResponse() + pb_resp = business_glossary.ListGlossaryCategoriesResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_list_glossary_categories(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_glossary_categories_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = ( + business_glossary.ListGlossaryCategoriesResponse.to_json( + response + ) + ) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.dataplex_v1.BusinessGlossaryServiceClient.list_glossary_categories", + extra={ + "serviceName": "google.cloud.dataplex.v1.BusinessGlossaryService", + "rpcName": "ListGlossaryCategories", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _ListGlossaryTerms( + _BaseBusinessGlossaryServiceRestTransport._BaseListGlossaryTerms, + BusinessGlossaryServiceRestStub, + ): + def __hash__(self): + return hash("BusinessGlossaryServiceRestTransport.ListGlossaryTerms") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response 
+ + def __call__( + self, + request: business_glossary.ListGlossaryTermsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> business_glossary.ListGlossaryTermsResponse: + r"""Call the list glossary terms method over HTTP. + + Args: + request (~.business_glossary.ListGlossaryTermsRequest): + The request object. List GlossaryTerms Request + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.business_glossary.ListGlossaryTermsResponse: + List GlossaryTerms Response + """ + + http_options = ( + _BaseBusinessGlossaryServiceRestTransport._BaseListGlossaryTerms._get_http_options() + ) + + request, metadata = self._interceptor.pre_list_glossary_terms( + request, metadata + ) + transcoded_request = _BaseBusinessGlossaryServiceRestTransport._BaseListGlossaryTerms._get_transcoded_request( + http_options, request + ) + + # Jsonify the query params + query_params = _BaseBusinessGlossaryServiceRestTransport._BaseListGlossaryTerms._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for 
google.cloud.dataplex_v1.BusinessGlossaryServiceClient.ListGlossaryTerms", + extra={ + "serviceName": "google.cloud.dataplex.v1.BusinessGlossaryService", + "rpcName": "ListGlossaryTerms", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = ( + BusinessGlossaryServiceRestTransport._ListGlossaryTerms._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = business_glossary.ListGlossaryTermsResponse() + pb_resp = business_glossary.ListGlossaryTermsResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_list_glossary_terms(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_glossary_terms_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = ( + business_glossary.ListGlossaryTermsResponse.to_json(response) + ) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.dataplex_v1.BusinessGlossaryServiceClient.list_glossary_terms", + extra={ + "serviceName": "google.cloud.dataplex.v1.BusinessGlossaryService", + "rpcName": "ListGlossaryTerms", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _UpdateGlossary( + _BaseBusinessGlossaryServiceRestTransport._BaseUpdateGlossary, + BusinessGlossaryServiceRestStub, + ): + def __hash__(self): + return 
hash("BusinessGlossaryServiceRestTransport.UpdateGlossary") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response + + def __call__( + self, + request: business_glossary.UpdateGlossaryRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operations_pb2.Operation: + r"""Call the update glossary method over HTTP. + + Args: + request (~.business_glossary.UpdateGlossaryRequest): + The request object. Update Glossary Request + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options = ( + _BaseBusinessGlossaryServiceRestTransport._BaseUpdateGlossary._get_http_options() + ) + + request, metadata = self._interceptor.pre_update_glossary(request, metadata) + transcoded_request = _BaseBusinessGlossaryServiceRestTransport._BaseUpdateGlossary._get_transcoded_request( + http_options, request + ) + + body = _BaseBusinessGlossaryServiceRestTransport._BaseUpdateGlossary._get_request_body_json( + transcoded_request + ) + + # Jsonify the query params + query_params = _BaseBusinessGlossaryServiceRestTransport._BaseUpdateGlossary._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.dataplex_v1.BusinessGlossaryServiceClient.UpdateGlossary", + extra={ + "serviceName": "google.cloud.dataplex.v1.BusinessGlossaryService", + "rpcName": "UpdateGlossary", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = ( + BusinessGlossaryServiceRestTransport._UpdateGlossary._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + body, + ) + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_update_glossary(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_glossary_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.dataplex_v1.BusinessGlossaryServiceClient.update_glossary", + extra={ + "serviceName": "google.cloud.dataplex.v1.BusinessGlossaryService", + "rpcName": "UpdateGlossary", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _UpdateGlossaryCategory( + _BaseBusinessGlossaryServiceRestTransport._BaseUpdateGlossaryCategory, + BusinessGlossaryServiceRestStub, + ): + def __hash__(self): + return hash("BusinessGlossaryServiceRestTransport.UpdateGlossaryCategory") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response + + def __call__( + self, + request: business_glossary.UpdateGlossaryCategoryRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: 
Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> business_glossary.GlossaryCategory: + r"""Call the update glossary category method over HTTP. + + Args: + request (~.business_glossary.UpdateGlossaryCategoryRequest): + The request object. Update GlossaryCategory Request + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.business_glossary.GlossaryCategory: + A GlossaryCategory represents a + collection of GlossaryCategories and + GlossaryTerms within a Glossary that are + related to each other. + + """ + + http_options = ( + _BaseBusinessGlossaryServiceRestTransport._BaseUpdateGlossaryCategory._get_http_options() + ) + + request, metadata = self._interceptor.pre_update_glossary_category( + request, metadata + ) + transcoded_request = _BaseBusinessGlossaryServiceRestTransport._BaseUpdateGlossaryCategory._get_transcoded_request( + http_options, request + ) + + body = _BaseBusinessGlossaryServiceRestTransport._BaseUpdateGlossaryCategory._get_request_body_json( + transcoded_request + ) + + # Jsonify the query params + query_params = _BaseBusinessGlossaryServiceRestTransport._BaseUpdateGlossaryCategory._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + 
"requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.dataplex_v1.BusinessGlossaryServiceClient.UpdateGlossaryCategory", + extra={ + "serviceName": "google.cloud.dataplex.v1.BusinessGlossaryService", + "rpcName": "UpdateGlossaryCategory", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = BusinessGlossaryServiceRestTransport._UpdateGlossaryCategory._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = business_glossary.GlossaryCategory() + pb_resp = business_glossary.GlossaryCategory.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_update_glossary_category(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_glossary_category_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = business_glossary.GlossaryCategory.to_json( + response + ) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.dataplex_v1.BusinessGlossaryServiceClient.update_glossary_category", + extra={ + "serviceName": "google.cloud.dataplex.v1.BusinessGlossaryService", + "rpcName": "UpdateGlossaryCategory", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _UpdateGlossaryTerm( + 
_BaseBusinessGlossaryServiceRestTransport._BaseUpdateGlossaryTerm, + BusinessGlossaryServiceRestStub, + ): + def __hash__(self): + return hash("BusinessGlossaryServiceRestTransport.UpdateGlossaryTerm") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response + + def __call__( + self, + request: business_glossary.UpdateGlossaryTermRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> business_glossary.GlossaryTerm: + r"""Call the update glossary term method over HTTP. + + Args: + request (~.business_glossary.UpdateGlossaryTermRequest): + The request object. Update GlossaryTerm Request + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.business_glossary.GlossaryTerm: + GlossaryTerms are the core of + Glossary. A GlossaryTerm holds a rich + text description that can be attached to + Entries or specific columns to enrich + them. 
+ + """ + + http_options = ( + _BaseBusinessGlossaryServiceRestTransport._BaseUpdateGlossaryTerm._get_http_options() + ) + + request, metadata = self._interceptor.pre_update_glossary_term( + request, metadata + ) + transcoded_request = _BaseBusinessGlossaryServiceRestTransport._BaseUpdateGlossaryTerm._get_transcoded_request( + http_options, request + ) + + body = _BaseBusinessGlossaryServiceRestTransport._BaseUpdateGlossaryTerm._get_request_body_json( + transcoded_request + ) + + # Jsonify the query params + query_params = _BaseBusinessGlossaryServiceRestTransport._BaseUpdateGlossaryTerm._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.dataplex_v1.BusinessGlossaryServiceClient.UpdateGlossaryTerm", + extra={ + "serviceName": "google.cloud.dataplex.v1.BusinessGlossaryService", + "rpcName": "UpdateGlossaryTerm", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = ( + BusinessGlossaryServiceRestTransport._UpdateGlossaryTerm._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + body, + ) + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = business_glossary.GlossaryTerm() + pb_resp = business_glossary.GlossaryTerm.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_update_glossary_term(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_glossary_term_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = business_glossary.GlossaryTerm.to_json(response) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.dataplex_v1.BusinessGlossaryServiceClient.update_glossary_term", + extra={ + "serviceName": "google.cloud.dataplex.v1.BusinessGlossaryService", + "rpcName": "UpdateGlossaryTerm", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + @property + def create_glossary( + self, + ) -> Callable[[business_glossary.CreateGlossaryRequest], operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._CreateGlossary(self._session, self._host, self._interceptor) # type: ignore + + @property + def create_glossary_category( + self, + ) -> Callable[ + [business_glossary.CreateGlossaryCategoryRequest], + business_glossary.GlossaryCategory, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._CreateGlossaryCategory(self._session, self._host, self._interceptor) # type: ignore + + @property + def create_glossary_term( + self, + ) -> Callable[ + [business_glossary.CreateGlossaryTermRequest], business_glossary.GlossaryTerm + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._CreateGlossaryTerm(self._session, self._host, self._interceptor) # type: ignore + + @property + def delete_glossary( + self, + ) -> Callable[[business_glossary.DeleteGlossaryRequest], operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._DeleteGlossary(self._session, self._host, self._interceptor) # type: ignore + + @property + def delete_glossary_category( + self, + ) -> Callable[[business_glossary.DeleteGlossaryCategoryRequest], empty_pb2.Empty]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._DeleteGlossaryCategory(self._session, self._host, self._interceptor) # type: ignore + + @property + def delete_glossary_term( + self, + ) -> Callable[[business_glossary.DeleteGlossaryTermRequest], empty_pb2.Empty]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._DeleteGlossaryTerm(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_glossary( + self, + ) -> Callable[[business_glossary.GetGlossaryRequest], business_glossary.Glossary]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._GetGlossary(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_glossary_category( + self, + ) -> Callable[ + [business_glossary.GetGlossaryCategoryRequest], + business_glossary.GlossaryCategory, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetGlossaryCategory(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_glossary_term( + self, + ) -> Callable[ + [business_glossary.GetGlossaryTermRequest], business_glossary.GlossaryTerm + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetGlossaryTerm(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_glossaries( + self, + ) -> Callable[ + [business_glossary.ListGlossariesRequest], + business_glossary.ListGlossariesResponse, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListGlossaries(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_glossary_categories( + self, + ) -> Callable[ + [business_glossary.ListGlossaryCategoriesRequest], + business_glossary.ListGlossaryCategoriesResponse, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._ListGlossaryCategories(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_glossary_terms( + self, + ) -> Callable[ + [business_glossary.ListGlossaryTermsRequest], + business_glossary.ListGlossaryTermsResponse, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListGlossaryTerms(self._session, self._host, self._interceptor) # type: ignore + + @property + def update_glossary( + self, + ) -> Callable[[business_glossary.UpdateGlossaryRequest], operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._UpdateGlossary(self._session, self._host, self._interceptor) # type: ignore + + @property + def update_glossary_category( + self, + ) -> Callable[ + [business_glossary.UpdateGlossaryCategoryRequest], + business_glossary.GlossaryCategory, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._UpdateGlossaryCategory(self._session, self._host, self._interceptor) # type: ignore + + @property + def update_glossary_term( + self, + ) -> Callable[ + [business_glossary.UpdateGlossaryTermRequest], business_glossary.GlossaryTerm + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._UpdateGlossaryTerm(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_location(self): + return self._GetLocation(self._session, self._host, self._interceptor) # type: ignore + + class _GetLocation( + _BaseBusinessGlossaryServiceRestTransport._BaseGetLocation, + BusinessGlossaryServiceRestStub, + ): + def __hash__(self): + return hash("BusinessGlossaryServiceRestTransport.GetLocation") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__( + self, + request: locations_pb2.GetLocationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> locations_pb2.Location: + r"""Call the get location method over HTTP. + + Args: + request (locations_pb2.GetLocationRequest): + The request object for GetLocation method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + locations_pb2.Location: Response from GetLocation method. 
+ """ + + http_options = ( + _BaseBusinessGlossaryServiceRestTransport._BaseGetLocation._get_http_options() + ) + + request, metadata = self._interceptor.pre_get_location(request, metadata) + transcoded_request = _BaseBusinessGlossaryServiceRestTransport._BaseGetLocation._get_transcoded_request( + http_options, request + ) + + # Jsonify the query params + query_params = _BaseBusinessGlossaryServiceRestTransport._BaseGetLocation._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.dataplex_v1.BusinessGlossaryServiceClient.GetLocation", + extra={ + "serviceName": "google.cloud.dataplex.v1.BusinessGlossaryService", + "rpcName": "GetLocation", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = BusinessGlossaryServiceRestTransport._GetLocation._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + content = response.content.decode("utf-8") + resp = locations_pb2.Location() + resp = json_format.Parse(content, resp) + resp = self._interceptor.post_get_location(resp) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.dataplex_v1.BusinessGlossaryServiceAsyncClient.GetLocation", + extra={ + "serviceName": "google.cloud.dataplex.v1.BusinessGlossaryService", + "rpcName": "GetLocation", + "httpResponse": http_response, + "metadata": http_response["headers"], + }, + ) + return resp + + @property + def list_locations(self): + return self._ListLocations(self._session, self._host, self._interceptor) # type: ignore + + class _ListLocations( + _BaseBusinessGlossaryServiceRestTransport._BaseListLocations, + BusinessGlossaryServiceRestStub, + ): + def __hash__(self): + return hash("BusinessGlossaryServiceRestTransport.ListLocations") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__( + self, + request: locations_pb2.ListLocationsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> 
locations_pb2.ListLocationsResponse: + r"""Call the list locations method over HTTP. + + Args: + request (locations_pb2.ListLocationsRequest): + The request object for ListLocations method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + locations_pb2.ListLocationsResponse: Response from ListLocations method. + """ + + http_options = ( + _BaseBusinessGlossaryServiceRestTransport._BaseListLocations._get_http_options() + ) + + request, metadata = self._interceptor.pre_list_locations(request, metadata) + transcoded_request = _BaseBusinessGlossaryServiceRestTransport._BaseListLocations._get_transcoded_request( + http_options, request + ) + + # Jsonify the query params + query_params = _BaseBusinessGlossaryServiceRestTransport._BaseListLocations._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.dataplex_v1.BusinessGlossaryServiceClient.ListLocations", + extra={ + "serviceName": "google.cloud.dataplex.v1.BusinessGlossaryService", + "rpcName": "ListLocations", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = ( + 
BusinessGlossaryServiceRestTransport._ListLocations._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + content = response.content.decode("utf-8") + resp = locations_pb2.ListLocationsResponse() + resp = json_format.Parse(content, resp) + resp = self._interceptor.post_list_locations(resp) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.dataplex_v1.BusinessGlossaryServiceAsyncClient.ListLocations", + extra={ + "serviceName": "google.cloud.dataplex.v1.BusinessGlossaryService", + "rpcName": "ListLocations", + "httpResponse": http_response, + "metadata": http_response["headers"], + }, + ) + return resp + + @property + def cancel_operation(self): + return self._CancelOperation(self._session, self._host, self._interceptor) # type: ignore + + class _CancelOperation( + _BaseBusinessGlossaryServiceRestTransport._BaseCancelOperation, + BusinessGlossaryServiceRestStub, + ): + def __hash__(self): + return hash("BusinessGlossaryServiceRestTransport.CancelOperation") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + 
params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response + + def __call__( + self, + request: operations_pb2.CancelOperationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> None: + r"""Call the cancel operation method over HTTP. + + Args: + request (operations_pb2.CancelOperationRequest): + The request object for CancelOperation method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + """ + + http_options = ( + _BaseBusinessGlossaryServiceRestTransport._BaseCancelOperation._get_http_options() + ) + + request, metadata = self._interceptor.pre_cancel_operation( + request, metadata + ) + transcoded_request = _BaseBusinessGlossaryServiceRestTransport._BaseCancelOperation._get_transcoded_request( + http_options, request + ) + + body = _BaseBusinessGlossaryServiceRestTransport._BaseCancelOperation._get_request_body_json( + transcoded_request + ) + + # Jsonify the query params + query_params = _BaseBusinessGlossaryServiceRestTransport._BaseCancelOperation._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": 
dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.dataplex_v1.BusinessGlossaryServiceClient.CancelOperation", + extra={ + "serviceName": "google.cloud.dataplex.v1.BusinessGlossaryService", + "rpcName": "CancelOperation", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = ( + BusinessGlossaryServiceRestTransport._CancelOperation._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + body, + ) + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + return self._interceptor.post_cancel_operation(None) + + @property + def delete_operation(self): + return self._DeleteOperation(self._session, self._host, self._interceptor) # type: ignore + + class _DeleteOperation( + _BaseBusinessGlossaryServiceRestTransport._BaseDeleteOperation, + BusinessGlossaryServiceRestStub, + ): + def __hash__(self): + return hash("BusinessGlossaryServiceRestTransport.DeleteOperation") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__( + self, + request: operations_pb2.DeleteOperationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> None: + r"""Call the delete operation method over HTTP. 
+ + Args: + request (operations_pb2.DeleteOperationRequest): + The request object for DeleteOperation method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + """ + + http_options = ( + _BaseBusinessGlossaryServiceRestTransport._BaseDeleteOperation._get_http_options() + ) + + request, metadata = self._interceptor.pre_delete_operation( + request, metadata + ) + transcoded_request = _BaseBusinessGlossaryServiceRestTransport._BaseDeleteOperation._get_transcoded_request( + http_options, request + ) + + # Jsonify the query params + query_params = _BaseBusinessGlossaryServiceRestTransport._BaseDeleteOperation._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.dataplex_v1.BusinessGlossaryServiceClient.DeleteOperation", + extra={ + "serviceName": "google.cloud.dataplex.v1.BusinessGlossaryService", + "rpcName": "DeleteOperation", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = ( + BusinessGlossaryServiceRestTransport._DeleteOperation._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + 
transcoded_request, + ) + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + return self._interceptor.post_delete_operation(None) + + @property + def get_operation(self): + return self._GetOperation(self._session, self._host, self._interceptor) # type: ignore + + class _GetOperation( + _BaseBusinessGlossaryServiceRestTransport._BaseGetOperation, + BusinessGlossaryServiceRestStub, + ): + def __hash__(self): + return hash("BusinessGlossaryServiceRestTransport.GetOperation") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__( + self, + request: operations_pb2.GetOperationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operations_pb2.Operation: + r"""Call the get operation method over HTTP. + + Args: + request (operations_pb2.GetOperationRequest): + The request object for GetOperation method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
+ + Returns: + operations_pb2.Operation: Response from GetOperation method. + """ + + http_options = ( + _BaseBusinessGlossaryServiceRestTransport._BaseGetOperation._get_http_options() + ) + + request, metadata = self._interceptor.pre_get_operation(request, metadata) + transcoded_request = _BaseBusinessGlossaryServiceRestTransport._BaseGetOperation._get_transcoded_request( + http_options, request + ) + + # Jsonify the query params + query_params = _BaseBusinessGlossaryServiceRestTransport._BaseGetOperation._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.dataplex_v1.BusinessGlossaryServiceClient.GetOperation", + extra={ + "serviceName": "google.cloud.dataplex.v1.BusinessGlossaryService", + "rpcName": "GetOperation", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = BusinessGlossaryServiceRestTransport._GetOperation._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + content = response.content.decode("utf-8") + resp = operations_pb2.Operation() + resp = json_format.Parse(content, resp) + resp = self._interceptor.post_get_operation(resp) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.dataplex_v1.BusinessGlossaryServiceAsyncClient.GetOperation", + extra={ + "serviceName": "google.cloud.dataplex.v1.BusinessGlossaryService", + "rpcName": "GetOperation", + "httpResponse": http_response, + "metadata": http_response["headers"], + }, + ) + return resp + + @property + def list_operations(self): + return self._ListOperations(self._session, self._host, self._interceptor) # type: ignore + + class _ListOperations( + _BaseBusinessGlossaryServiceRestTransport._BaseListOperations, + BusinessGlossaryServiceRestStub, + ): + def __hash__(self): + return hash("BusinessGlossaryServiceRestTransport.ListOperations") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__( + self, + request: operations_pb2.ListOperationsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> 
operations_pb2.ListOperationsResponse: + r"""Call the list operations method over HTTP. + + Args: + request (operations_pb2.ListOperationsRequest): + The request object for ListOperations method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + operations_pb2.ListOperationsResponse: Response from ListOperations method. + """ + + http_options = ( + _BaseBusinessGlossaryServiceRestTransport._BaseListOperations._get_http_options() + ) + + request, metadata = self._interceptor.pre_list_operations(request, metadata) + transcoded_request = _BaseBusinessGlossaryServiceRestTransport._BaseListOperations._get_transcoded_request( + http_options, request + ) + + # Jsonify the query params + query_params = _BaseBusinessGlossaryServiceRestTransport._BaseListOperations._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.dataplex_v1.BusinessGlossaryServiceClient.ListOperations", + extra={ + "serviceName": "google.cloud.dataplex.v1.BusinessGlossaryService", + "rpcName": "ListOperations", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + 
response = ( + BusinessGlossaryServiceRestTransport._ListOperations._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + content = response.content.decode("utf-8") + resp = operations_pb2.ListOperationsResponse() + resp = json_format.Parse(content, resp) + resp = self._interceptor.post_list_operations(resp) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.dataplex_v1.BusinessGlossaryServiceAsyncClient.ListOperations", + extra={ + "serviceName": "google.cloud.dataplex.v1.BusinessGlossaryService", + "rpcName": "ListOperations", + "httpResponse": http_response, + "metadata": http_response["headers"], + }, + ) + return resp + + @property + def kind(self) -> str: + return "rest" + + def close(self): + self._session.close() + + +__all__ = ("BusinessGlossaryServiceRestTransport",) diff --git a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/business_glossary_service/transports/rest_base.py b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/business_glossary_service/transports/rest_base.py new file mode 100644 index 000000000000..16c3ee1bdd58 --- /dev/null +++ b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/business_glossary_service/transports/rest_base.py @@ -0,0 +1,1046 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with 
the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import json # type: ignore +import re +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union + +from google.api_core import gapic_v1, path_template +from google.cloud.location import locations_pb2 # type: ignore +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore +from google.protobuf import json_format + +from google.cloud.dataplex_v1.types import business_glossary + +from .base import DEFAULT_CLIENT_INFO, BusinessGlossaryServiceTransport + + +class _BaseBusinessGlossaryServiceRestTransport(BusinessGlossaryServiceTransport): + """Base REST backend transport for BusinessGlossaryService. + + Note: This class is not meant to be used directly. Use its sync and + async sub-classes instead. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends JSON representations of protocol buffers over HTTP/1.1 + """ + + def __init__( + self, + *, + host: str = "dataplex.googleapis.com", + credentials: Optional[Any] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = "https", + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + Args: + host (Optional[str]): + The hostname to connect to (default: 'dataplex.googleapis.com'). 
+ credentials (Optional[Any]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you are developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. + """ + # Run the base constructor + maybe_url_match = re.match("^(?P<scheme>http(?:s)?://)?(?P<host>.*)$", host) + if maybe_url_match is None: + raise ValueError( + f"Unexpected hostname structure: {host}" + ) # pragma: NO COVER + + url_match_items = maybe_url_match.groupdict() + + host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host + + super().__init__( + host=host, + credentials=credentials, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + class _BaseCreateGlossary: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + "glossaryId": "", + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{parent=projects/*/locations/*}/glossaries", + "body": "glossary", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = 
business_glossary.CreateGlossaryRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + return body + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BaseBusinessGlossaryServiceRestTransport._BaseCreateGlossary._get_unset_required_fields( + query_params + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseCreateGlossaryCategory: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + "categoryId": "", + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{parent=projects/*/locations/*/glossaries/*}/categories", + "body": "category", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = business_glossary.CreateGlossaryCategoryRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + return body + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( 
+ transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BaseBusinessGlossaryServiceRestTransport._BaseCreateGlossaryCategory._get_unset_required_fields( + query_params + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseCreateGlossaryTerm: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + "termId": "", + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{parent=projects/*/locations/*/glossaries/*}/terms", + "body": "term", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = business_glossary.CreateGlossaryTermRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + return body + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BaseBusinessGlossaryServiceRestTransport._BaseCreateGlossaryTerm._get_unset_required_fields( + query_params + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseDeleteGlossary: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod 
+ def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "delete", + "uri": "/v1/{name=projects/*/locations/*/glossaries/*}", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = business_glossary.DeleteGlossaryRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BaseBusinessGlossaryServiceRestTransport._BaseDeleteGlossary._get_unset_required_fields( + query_params + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseDeleteGlossaryCategory: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "delete", + "uri": "/v1/{name=projects/*/locations/*/glossaries/*/categories/*}", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = business_glossary.DeleteGlossaryCategoryRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + 
transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BaseBusinessGlossaryServiceRestTransport._BaseDeleteGlossaryCategory._get_unset_required_fields( + query_params + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseDeleteGlossaryTerm: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "delete", + "uri": "/v1/{name=projects/*/locations/*/glossaries/*/terms/*}", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = business_glossary.DeleteGlossaryTermRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BaseBusinessGlossaryServiceRestTransport._BaseDeleteGlossaryTerm._get_unset_required_fields( + query_params + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseGetGlossary: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { 
+ "method": "get", + "uri": "/v1/{name=projects/*/locations/*/glossaries/*}", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = business_glossary.GetGlossaryRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BaseBusinessGlossaryServiceRestTransport._BaseGetGlossary._get_unset_required_fields( + query_params + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseGetGlossaryCategory: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/glossaries/*/categories/*}", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = business_glossary.GetGlossaryCategoryRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BaseBusinessGlossaryServiceRestTransport._BaseGetGlossaryCategory._get_unset_required_fields( + query_params + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + return 
query_params + + class _BaseGetGlossaryTerm: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/glossaries/*/terms/*}", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = business_glossary.GetGlossaryTermRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BaseBusinessGlossaryServiceRestTransport._BaseGetGlossaryTerm._get_unset_required_fields( + query_params + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseListGlossaries: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{parent=projects/*/locations/*}/glossaries", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = business_glossary.ListGlossariesRequest.pb(request) + transcoded_request = 
path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BaseBusinessGlossaryServiceRestTransport._BaseListGlossaries._get_unset_required_fields( + query_params + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseListGlossaryCategories: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{parent=projects/*/locations/*/glossaries/*}/categories", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = business_glossary.ListGlossaryCategoriesRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BaseBusinessGlossaryServiceRestTransport._BaseListGlossaryCategories._get_unset_required_fields( + query_params + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseListGlossaryTerms: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def 
_get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{parent=projects/*/locations/*/glossaries/*}/terms", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = business_glossary.ListGlossaryTermsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BaseBusinessGlossaryServiceRestTransport._BaseListGlossaryTerms._get_unset_required_fields( + query_params + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseUpdateGlossary: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + "updateMask": {}, + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "patch", + "uri": "/v1/{glossary.name=projects/*/locations/*/glossaries/*}", + "body": "glossary", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = business_glossary.UpdateGlossaryRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + # Jsonify the request body + + body = 
json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + return body + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BaseBusinessGlossaryServiceRestTransport._BaseUpdateGlossary._get_unset_required_fields( + query_params + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseUpdateGlossaryCategory: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + "updateMask": {}, + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "patch", + "uri": "/v1/{category.name=projects/*/locations/*/glossaries/*/categories/*}", + "body": "category", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = business_glossary.UpdateGlossaryCategoryRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + return body + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BaseBusinessGlossaryServiceRestTransport._BaseUpdateGlossaryCategory._get_unset_required_fields( + query_params + ) + ) + + query_params["$alt"] = 
"json;enum-encoding=int" + return query_params + + class _BaseUpdateGlossaryTerm: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + "updateMask": {}, + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "patch", + "uri": "/v1/{term.name=projects/*/locations/*/glossaries/*/terms/*}", + "body": "term", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = business_glossary.UpdateGlossaryTermRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + return body + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BaseBusinessGlossaryServiceRestTransport._BaseUpdateGlossaryTerm._get_unset_required_fields( + query_params + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseGetLocation: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*}", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + request_kwargs = json_format.MessageToDict(request) + 
transcoded_request = path_template.transcode(http_options, **request_kwargs) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + return query_params + + class _BaseListLocations: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=projects/*}/locations", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + return query_params + + class _BaseCancelOperation: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{name=projects/*/locations/*/operations/*}:cancel", + "body": "*", + }, + { + "method": "post", + "uri": "/v1/{name=organizations/*/locations/*/operations/*}:cancel", + "body": "*", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + body = json.dumps(transcoded_request["body"]) + return body + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + return query_params + + class 
_BaseDeleteOperation: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "delete", + "uri": "/v1/{name=projects/*/locations/*/operations/*}", + }, + { + "method": "delete", + "uri": "/v1/{name=organizations/*/locations/*/operations/*}", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + return query_params + + class _BaseGetOperation: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/operations/*}", + }, + { + "method": "get", + "uri": "/v1/{name=organizations/*/locations/*/operations/*}", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + return query_params + + class _BaseListOperations: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*}/operations", + }, + { + "method": "get", + "uri": 
"/v1/{name=organizations/*/locations/*}/operations", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + return query_params + + +__all__ = ("_BaseBusinessGlossaryServiceRestTransport",) diff --git a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/catalog_service/async_client.py b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/catalog_service/async_client.py index 09ba265260b3..9f620c738b28 100644 --- a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/catalog_service/async_client.py +++ b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/catalog_service/async_client.py @@ -73,9 +73,9 @@ class CatalogServiceAsyncClient: """The primary resources offered by this service are - EntryGroups, EntryTypes, AspectTypes, and Entries. They - collectively let data administrators organize, manage, secure, - and catalog data located across cloud projects in their + EntryGroups, EntryTypes, AspectTypes, Entries and EntryLinks. + They collectively let data administrators organize, manage, + secure, and catalog data located across cloud projects in their organization in a variety of storage systems, including Cloud Storage and BigQuery. 
""" @@ -95,8 +95,12 @@ class CatalogServiceAsyncClient: parse_entry_path = staticmethod(CatalogServiceClient.parse_entry_path) entry_group_path = staticmethod(CatalogServiceClient.entry_group_path) parse_entry_group_path = staticmethod(CatalogServiceClient.parse_entry_group_path) + entry_link_path = staticmethod(CatalogServiceClient.entry_link_path) + parse_entry_link_path = staticmethod(CatalogServiceClient.parse_entry_link_path) entry_type_path = staticmethod(CatalogServiceClient.entry_type_path) parse_entry_type_path = staticmethod(CatalogServiceClient.parse_entry_type_path) + glossary_path = staticmethod(CatalogServiceClient.glossary_path) + parse_glossary_path = staticmethod(CatalogServiceClient.parse_glossary_path) metadata_job_path = staticmethod(CatalogServiceClient.metadata_job_path) parse_metadata_job_path = staticmethod(CatalogServiceClient.parse_metadata_job_path) common_billing_account_path = staticmethod( @@ -629,7 +633,7 @@ async def sample_delete_entry_type(): Args: request (Optional[Union[google.cloud.dataplex_v1.types.DeleteEntryTypeRequest, dict]]): - The request object. Delele EntryType Request. + The request object. Delete EntryType Request. name (:class:`str`): Required. The resource name of the EntryType: ``projects/{project_number}/locations/{location_id}/entryTypes/{entry_type_id}``. @@ -1283,7 +1287,7 @@ async def sample_delete_aspect_type(): Args: request (Optional[Union[google.cloud.dataplex_v1.types.DeleteAspectTypeRequest, dict]]): - The request object. Delele AspectType Request. + The request object. Delete AspectType Request. name (:class:`str`): Required. The resource name of the AspectType: ``projects/{project_number}/locations/{location_id}/aspectTypes/{aspect_type_id}``. @@ -1661,7 +1665,7 @@ async def sample_create_entry_group(): parent (:class:`str`): Required. The resource name of the entryGroup, of the form: projects/{project_number}/locations/{location_id} - where ``location_id`` refers to a GCP region. 
+ where ``location_id`` refers to a Google Cloud region. This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this @@ -3021,7 +3025,7 @@ async def sample_search_entries(): name (:class:`str`): Required. The project to which the request should be attributed in the following form: - ``projects/{project}/locations/{location}``. + ``projects/{project}/locations/global``. This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this @@ -3029,7 +3033,7 @@ async def sample_search_entries(): query (:class:`str`): Required. The query against which entries in scope should be matched. The query syntax is defined in - `Search syntax for Dataplex + `Search syntax for Dataplex Universal Catalog `__. This corresponds to the ``query`` field @@ -3124,8 +3128,8 @@ async def create_metadata_job( metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operation_async.AsyncOperation: r"""Creates a metadata job. For example, use a metadata - job to import Dataplex Catalog entries and aspects from - a third-party system into Dataplex. + job to import metadata from a third-party system into + Dataplex Universal Catalog. .. code-block:: python @@ -3605,6 +3609,367 @@ async def sample_cancel_metadata_job(): metadata=metadata, ) + async def create_entry_link( + self, + request: Optional[Union[catalog.CreateEntryLinkRequest, dict]] = None, + *, + parent: Optional[str] = None, + entry_link: Optional[catalog.EntryLink] = None, + entry_link_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> catalog.EntryLink: + r"""Creates an Entry Link. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. 
+ # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + + async def sample_create_entry_link(): + # Create a client + client = dataplex_v1.CatalogServiceAsyncClient() + + # Initialize request argument(s) + entry_link = dataplex_v1.EntryLink() + entry_link.entry_link_type = "entry_link_type_value" + entry_link.entry_references.name = "name_value" + entry_link.entry_references.type_ = "TARGET" + + request = dataplex_v1.CreateEntryLinkRequest( + parent="parent_value", + entry_link_id="entry_link_id_value", + entry_link=entry_link, + ) + + # Make the request + response = await client.create_entry_link(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.dataplex_v1.types.CreateEntryLinkRequest, dict]]): + The request object. Request message for CreateEntryLink. + parent (:class:`str`): + Required. The resource name of the parent Entry Group: + ``projects/{project_id_or_number}/locations/{location_id}/entryGroups/{entry_group_id}``. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + entry_link (:class:`google.cloud.dataplex_v1.types.EntryLink`): + Required. Entry Link resource. + This corresponds to the ``entry_link`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + entry_link_id (:class:`str`): + Required. Entry Link identifier + + - Must contain only lowercase letters, numbers and + hyphens. + - Must start with a letter. + - Must be between 1-63 characters. + - Must end with a number or a letter. + - Must be unique within the EntryGroup. 
+ + This corresponds to the ``entry_link_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.dataplex_v1.types.EntryLink: + EntryLink represents a link between + two Entries. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [parent, entry_link, entry_link_id] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, catalog.CreateEntryLinkRequest): + request = catalog.CreateEntryLinkRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if entry_link is not None: + request.entry_link = entry_link + if entry_link_id is not None: + request.entry_link_id = entry_link_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = self._client._transport._wrapped_methods[ + self._client._transport.create_entry_link + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def delete_entry_link( + self, + request: Optional[Union[catalog.DeleteEntryLinkRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> catalog.EntryLink: + r"""Deletes an Entry Link. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + + async def sample_delete_entry_link(): + # Create a client + client = dataplex_v1.CatalogServiceAsyncClient() + + # Initialize request argument(s) + request = dataplex_v1.DeleteEntryLinkRequest( + name="name_value", + ) + + # Make the request + response = await client.delete_entry_link(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.dataplex_v1.types.DeleteEntryLinkRequest, dict]]): + The request object. Request message for DeleteEntryLink. + name (:class:`str`): + Required. 
The resource name of the Entry Link: + ``projects/{project_id_or_number}/locations/{location_id}/entryGroups/{entry_group_id}/entryLinks/{entry_link_id}``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.dataplex_v1.types.EntryLink: + EntryLink represents a link between + two Entries. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, catalog.DeleteEntryLinkRequest): + request = catalog.DeleteEntryLinkRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.delete_entry_link + ] + + # Certain fields should be provided within the metadata header; + # add these here. 
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_entry_link( + self, + request: Optional[Union[catalog.GetEntryLinkRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> catalog.EntryLink: + r"""Gets an Entry Link. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + + async def sample_get_entry_link(): + # Create a client + client = dataplex_v1.CatalogServiceAsyncClient() + + # Initialize request argument(s) + request = dataplex_v1.GetEntryLinkRequest( + name="name_value", + ) + + # Make the request + response = await client.get_entry_link(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.dataplex_v1.types.GetEntryLinkRequest, dict]]): + The request object. Request message for GetEntryLink. + name (:class:`str`): + Required. The resource name of the Entry Link: + ``projects/{project_id_or_number}/locations/{location_id}/entryGroups/{entry_group_id}/entryLinks/{entry_link_id}``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.dataplex_v1.types.EntryLink: + EntryLink represents a link between + two Entries. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, catalog.GetEntryLinkRequest): + request = catalog.GetEntryLinkRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.get_entry_link + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. 
+ response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + async def list_operations( self, request: Optional[operations_pb2.ListOperationsRequest] = None, diff --git a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/catalog_service/client.py b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/catalog_service/client.py index 8cf6cb9421c6..18128a64f5b6 100644 --- a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/catalog_service/client.py +++ b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/catalog_service/client.py @@ -119,9 +119,9 @@ def get_transport_class( class CatalogServiceClient(metaclass=CatalogServiceClientMeta): """The primary resources offered by this service are - EntryGroups, EntryTypes, AspectTypes, and Entries. They - collectively let data administrators organize, manage, secure, - and catalog data located across cloud projects in their + EntryGroups, EntryTypes, AspectTypes, Entries and EntryLinks. + They collectively let data administrators organize, manage, + secure, and catalog data located across cloud projects in their organization in a variety of storage systems, including Cloud Storage and BigQuery. 
""" @@ -284,6 +284,30 @@ def parse_entry_group_path(path: str) -> Dict[str, str]: ) return m.groupdict() if m else {} + @staticmethod + def entry_link_path( + project: str, + location: str, + entry_group: str, + entry_link: str, + ) -> str: + """Returns a fully-qualified entry_link string.""" + return "projects/{project}/locations/{location}/entryGroups/{entry_group}/entryLinks/{entry_link}".format( + project=project, + location=location, + entry_group=entry_group, + entry_link=entry_link, + ) + + @staticmethod + def parse_entry_link_path(path: str) -> Dict[str, str]: + """Parses a entry_link path into its component segments.""" + m = re.match( + r"^projects/(?P.+?)/locations/(?P.+?)/entryGroups/(?P.+?)/entryLinks/(?P.+?)$", + path, + ) + return m.groupdict() if m else {} + @staticmethod def entry_type_path( project: str, @@ -306,6 +330,28 @@ def parse_entry_type_path(path: str) -> Dict[str, str]: ) return m.groupdict() if m else {} + @staticmethod + def glossary_path( + project: str, + location: str, + glossary: str, + ) -> str: + """Returns a fully-qualified glossary string.""" + return "projects/{project}/locations/{location}/glossaries/{glossary}".format( + project=project, + location=location, + glossary=glossary, + ) + + @staticmethod + def parse_glossary_path(path: str) -> Dict[str, str]: + """Parses a glossary path into its component segments.""" + m = re.match( + r"^projects/(?P.+?)/locations/(?P.+?)/glossaries/(?P.+?)$", + path, + ) + return m.groupdict() if m else {} + @staticmethod def metadata_job_path( project: str, @@ -1134,7 +1180,7 @@ def sample_delete_entry_type(): Args: request (Union[google.cloud.dataplex_v1.types.DeleteEntryTypeRequest, dict]): - The request object. Delele EntryType Request. + The request object. Delete EntryType Request. name (str): Required. The resource name of the EntryType: ``projects/{project_number}/locations/{location_id}/entryTypes/{entry_type_id}``. 
@@ -1773,7 +1819,7 @@ def sample_delete_aspect_type():
 
         Args:
             request (Union[google.cloud.dataplex_v1.types.DeleteAspectTypeRequest, dict]):
-                The request object. Delele AspectType Request.
+                The request object. Delete AspectType Request.
             name (str):
                 Required. The resource name of the AspectType:
                 ``projects/{project_number}/locations/{location_id}/aspectTypes/{aspect_type_id}``.
@@ -2142,7 +2188,7 @@ def sample_create_entry_group():
             parent (str):
                 Required. The resource name of the entryGroup, of the form:
                 projects/{project_number}/locations/{location_id}
-                where ``location_id`` refers to a GCP region.
+                where ``location_id`` refers to a Google Cloud region.
 
                 This corresponds to the ``parent`` field
                 on the ``request`` instance; if ``request`` is provided, this
@@ -3470,7 +3516,7 @@ def sample_search_entries():
             name (str):
                 Required. The project to which the request should be
                 attributed in the following form:
-                ``projects/{project}/locations/{location}``.
+                ``projects/{project}/locations/global``.
 
                 This corresponds to the ``name`` field
                 on the ``request`` instance; if ``request`` is provided, this
@@ -3478,7 +3524,7 @@
             query (str):
                 Required. The query against which entries in scope should
                 be matched. The query syntax is defined in
-                `Search syntax for Dataplex
+                `Search syntax for Dataplex Universal Catalog
                 <https://cloud.google.com/dataplex/docs/search-syntax>`__.
 
                 This corresponds to the ``query`` field
@@ -3570,8 +3616,8 @@ def create_metadata_job(
         metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
     ) -> operation.Operation:
         r"""Creates a metadata job. For example, use a metadata
-        job to import Dataplex Catalog entries and aspects from
-        a third-party system into Dataplex.
+        job to import metadata from a third-party system into
+        Dataplex Universal Catalog.
 
         ..
code-block:: python @@ -4039,6 +4085,358 @@ def sample_cancel_metadata_job(): metadata=metadata, ) + def create_entry_link( + self, + request: Optional[Union[catalog.CreateEntryLinkRequest, dict]] = None, + *, + parent: Optional[str] = None, + entry_link: Optional[catalog.EntryLink] = None, + entry_link_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> catalog.EntryLink: + r"""Creates an Entry Link. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + + def sample_create_entry_link(): + # Create a client + client = dataplex_v1.CatalogServiceClient() + + # Initialize request argument(s) + entry_link = dataplex_v1.EntryLink() + entry_link.entry_link_type = "entry_link_type_value" + entry_link.entry_references.name = "name_value" + entry_link.entry_references.type_ = "TARGET" + + request = dataplex_v1.CreateEntryLinkRequest( + parent="parent_value", + entry_link_id="entry_link_id_value", + entry_link=entry_link, + ) + + # Make the request + response = client.create_entry_link(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.dataplex_v1.types.CreateEntryLinkRequest, dict]): + The request object. Request message for CreateEntryLink. + parent (str): + Required. The resource name of the parent Entry Group: + ``projects/{project_id_or_number}/locations/{location_id}/entryGroups/{entry_group_id}``. 
+ + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + entry_link (google.cloud.dataplex_v1.types.EntryLink): + Required. Entry Link resource. + This corresponds to the ``entry_link`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + entry_link_id (str): + Required. Entry Link identifier + + - Must contain only lowercase letters, numbers and + hyphens. + - Must start with a letter. + - Must be between 1-63 characters. + - Must end with a number or a letter. + - Must be unique within the EntryGroup. + + This corresponds to the ``entry_link_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.dataplex_v1.types.EntryLink: + EntryLink represents a link between + two Entries. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [parent, entry_link, entry_link_id] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
+ if not isinstance(request, catalog.CreateEntryLinkRequest): + request = catalog.CreateEntryLinkRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if entry_link is not None: + request.entry_link = entry_link + if entry_link_id is not None: + request.entry_link_id = entry_link_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.create_entry_link] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def delete_entry_link( + self, + request: Optional[Union[catalog.DeleteEntryLinkRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> catalog.EntryLink: + r"""Deletes an Entry Link. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + + def sample_delete_entry_link(): + # Create a client + client = dataplex_v1.CatalogServiceClient() + + # Initialize request argument(s) + request = dataplex_v1.DeleteEntryLinkRequest( + name="name_value", + ) + + # Make the request + response = client.delete_entry_link(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.dataplex_v1.types.DeleteEntryLinkRequest, dict]): + The request object. Request message for DeleteEntryLink. + name (str): + Required. The resource name of the Entry Link: + ``projects/{project_id_or_number}/locations/{location_id}/entryGroups/{entry_group_id}/entryLinks/{entry_link_id}``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.dataplex_v1.types.EntryLink: + EntryLink represents a link between + two Entries. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, catalog.DeleteEntryLinkRequest): + request = catalog.DeleteEntryLinkRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete_entry_link] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def get_entry_link( + self, + request: Optional[Union[catalog.GetEntryLinkRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> catalog.EntryLink: + r"""Gets an Entry Link. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import dataplex_v1 + + def sample_get_entry_link(): + # Create a client + client = dataplex_v1.CatalogServiceClient() + + # Initialize request argument(s) + request = dataplex_v1.GetEntryLinkRequest( + name="name_value", + ) + + # Make the request + response = client.get_entry_link(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.dataplex_v1.types.GetEntryLinkRequest, dict]): + The request object. Request message for GetEntryLink. + name (str): + Required. The resource name of the Entry Link: + ``projects/{project_id_or_number}/locations/{location_id}/entryGroups/{entry_group_id}/entryLinks/{entry_link_id}``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.dataplex_v1.types.EntryLink: + EntryLink represents a link between + two Entries. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, catalog.GetEntryLinkRequest): + request = catalog.GetEntryLinkRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_entry_link] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + def __enter__(self) -> "CatalogServiceClient": return self diff --git a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/catalog_service/transports/base.py b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/catalog_service/transports/base.py index be553e45fcb1..8c2fb9760b70 100644 --- a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/catalog_service/transports/base.py +++ b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/catalog_service/transports/base.py @@ -377,6 +377,21 @@ def _prep_wrapped_messages(self, client_info): default_timeout=None, client_info=client_info, ), + self.create_entry_link: gapic_v1.method.wrap_method( + self.create_entry_link, + default_timeout=None, + client_info=client_info, + ), + self.delete_entry_link: gapic_v1.method.wrap_method( + self.delete_entry_link, + default_timeout=None, + client_info=client_info, + ), + self.get_entry_link: gapic_v1.method.wrap_method( + self.get_entry_link, + default_timeout=None, + client_info=client_info, + ), self.get_location: gapic_v1.method.wrap_method( self.get_location, default_timeout=None, @@ -661,6 +676,33 @@ def cancel_metadata_job( ]: raise NotImplementedError() + @property + def create_entry_link( + self, + ) -> Callable[ + [catalog.CreateEntryLinkRequest], + Union[catalog.EntryLink, Awaitable[catalog.EntryLink]], + ]: + raise NotImplementedError() + + @property + def delete_entry_link( + self, + ) -> Callable[ + [catalog.DeleteEntryLinkRequest], + Union[catalog.EntryLink, Awaitable[catalog.EntryLink]], + ]: + raise NotImplementedError() + + @property + def get_entry_link( + self, + ) -> Callable[ + [catalog.GetEntryLinkRequest], + Union[catalog.EntryLink, Awaitable[catalog.EntryLink]], + ]: + raise NotImplementedError() + @property def list_operations( self, diff --git a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/catalog_service/transports/grpc.py 
b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/catalog_service/transports/grpc.py index e180c13a42ec..7396cba863b2 100644 --- a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/catalog_service/transports/grpc.py +++ b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/catalog_service/transports/grpc.py @@ -116,9 +116,9 @@ class CatalogServiceGrpcTransport(CatalogServiceTransport): """gRPC backend transport for CatalogService. The primary resources offered by this service are - EntryGroups, EntryTypes, AspectTypes, and Entries. They - collectively let data administrators organize, manage, secure, - and catalog data located across cloud projects in their + EntryGroups, EntryTypes, AspectTypes, Entries and EntryLinks. + They collectively let data administrators organize, manage, + secure, and catalog data located across cloud projects in their organization in a variety of storage systems, including Cloud Storage and BigQuery. @@ -917,8 +917,8 @@ def create_metadata_job( r"""Return a callable for the create metadata job method over gRPC. Creates a metadata job. For example, use a metadata - job to import Dataplex Catalog entries and aspects from - a third-party system into Dataplex. + job to import metadata from a third-party system into + Dataplex Universal Catalog. Returns: Callable[[~.CreateMetadataJobRequest], @@ -1022,6 +1022,84 @@ def cancel_metadata_job( ) return self._stubs["cancel_metadata_job"] + @property + def create_entry_link( + self, + ) -> Callable[[catalog.CreateEntryLinkRequest], catalog.EntryLink]: + r"""Return a callable for the create entry link method over gRPC. + + Creates an Entry Link. + + Returns: + Callable[[~.CreateEntryLinkRequest], + ~.EntryLink]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. 
+ # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "create_entry_link" not in self._stubs: + self._stubs["create_entry_link"] = self._logged_channel.unary_unary( + "/google.cloud.dataplex.v1.CatalogService/CreateEntryLink", + request_serializer=catalog.CreateEntryLinkRequest.serialize, + response_deserializer=catalog.EntryLink.deserialize, + ) + return self._stubs["create_entry_link"] + + @property + def delete_entry_link( + self, + ) -> Callable[[catalog.DeleteEntryLinkRequest], catalog.EntryLink]: + r"""Return a callable for the delete entry link method over gRPC. + + Deletes an Entry Link. + + Returns: + Callable[[~.DeleteEntryLinkRequest], + ~.EntryLink]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_entry_link" not in self._stubs: + self._stubs["delete_entry_link"] = self._logged_channel.unary_unary( + "/google.cloud.dataplex.v1.CatalogService/DeleteEntryLink", + request_serializer=catalog.DeleteEntryLinkRequest.serialize, + response_deserializer=catalog.EntryLink.deserialize, + ) + return self._stubs["delete_entry_link"] + + @property + def get_entry_link( + self, + ) -> Callable[[catalog.GetEntryLinkRequest], catalog.EntryLink]: + r"""Return a callable for the get entry link method over gRPC. + + Gets an Entry Link. + + Returns: + Callable[[~.GetEntryLinkRequest], + ~.EntryLink]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "get_entry_link" not in self._stubs: + self._stubs["get_entry_link"] = self._logged_channel.unary_unary( + "/google.cloud.dataplex.v1.CatalogService/GetEntryLink", + request_serializer=catalog.GetEntryLinkRequest.serialize, + response_deserializer=catalog.EntryLink.deserialize, + ) + return self._stubs["get_entry_link"] + def close(self): self._logged_channel.close() diff --git a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/catalog_service/transports/grpc_asyncio.py b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/catalog_service/transports/grpc_asyncio.py index 24d73cf3f057..9eac535a065e 100644 --- a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/catalog_service/transports/grpc_asyncio.py +++ b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/catalog_service/transports/grpc_asyncio.py @@ -122,9 +122,9 @@ class CatalogServiceGrpcAsyncIOTransport(CatalogServiceTransport): """gRPC AsyncIO backend transport for CatalogService. The primary resources offered by this service are - EntryGroups, EntryTypes, AspectTypes, and Entries. They - collectively let data administrators organize, manage, secure, - and catalog data located across cloud projects in their + EntryGroups, EntryTypes, AspectTypes, Entries and EntryLinks. + They collectively let data administrators organize, manage, + secure, and catalog data located across cloud projects in their organization in a variety of storage systems, including Cloud Storage and BigQuery. @@ -963,8 +963,8 @@ def create_metadata_job( r"""Return a callable for the create metadata job method over gRPC. Creates a metadata job. For example, use a metadata - job to import Dataplex Catalog entries and aspects from - a third-party system into Dataplex. + job to import metadata from a third-party system into + Dataplex Universal Catalog. 
Returns: Callable[[~.CreateMetadataJobRequest], @@ -1070,6 +1070,84 @@ def cancel_metadata_job( ) return self._stubs["cancel_metadata_job"] + @property + def create_entry_link( + self, + ) -> Callable[[catalog.CreateEntryLinkRequest], Awaitable[catalog.EntryLink]]: + r"""Return a callable for the create entry link method over gRPC. + + Creates an Entry Link. + + Returns: + Callable[[~.CreateEntryLinkRequest], + Awaitable[~.EntryLink]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "create_entry_link" not in self._stubs: + self._stubs["create_entry_link"] = self._logged_channel.unary_unary( + "/google.cloud.dataplex.v1.CatalogService/CreateEntryLink", + request_serializer=catalog.CreateEntryLinkRequest.serialize, + response_deserializer=catalog.EntryLink.deserialize, + ) + return self._stubs["create_entry_link"] + + @property + def delete_entry_link( + self, + ) -> Callable[[catalog.DeleteEntryLinkRequest], Awaitable[catalog.EntryLink]]: + r"""Return a callable for the delete entry link method over gRPC. + + Deletes an Entry Link. + + Returns: + Callable[[~.DeleteEntryLinkRequest], + Awaitable[~.EntryLink]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "delete_entry_link" not in self._stubs: + self._stubs["delete_entry_link"] = self._logged_channel.unary_unary( + "/google.cloud.dataplex.v1.CatalogService/DeleteEntryLink", + request_serializer=catalog.DeleteEntryLinkRequest.serialize, + response_deserializer=catalog.EntryLink.deserialize, + ) + return self._stubs["delete_entry_link"] + + @property + def get_entry_link( + self, + ) -> Callable[[catalog.GetEntryLinkRequest], Awaitable[catalog.EntryLink]]: + r"""Return a callable for the get entry link method over gRPC. + + Gets an Entry Link. + + Returns: + Callable[[~.GetEntryLinkRequest], + Awaitable[~.EntryLink]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_entry_link" not in self._stubs: + self._stubs["get_entry_link"] = self._logged_channel.unary_unary( + "/google.cloud.dataplex.v1.CatalogService/GetEntryLink", + request_serializer=catalog.GetEntryLinkRequest.serialize, + response_deserializer=catalog.EntryLink.deserialize, + ) + return self._stubs["get_entry_link"] + def _prep_wrapped_messages(self, client_info): """Precompute the wrapped methods, overriding the base class method to use async wrappers.""" self._wrapped_methods = { @@ -1313,6 +1391,21 @@ def _prep_wrapped_messages(self, client_info): default_timeout=None, client_info=client_info, ), + self.create_entry_link: self._wrap_method( + self.create_entry_link, + default_timeout=None, + client_info=client_info, + ), + self.delete_entry_link: self._wrap_method( + self.delete_entry_link, + default_timeout=None, + client_info=client_info, + ), + self.get_entry_link: self._wrap_method( + self.get_entry_link, + default_timeout=None, + client_info=client_info, + ), self.get_location: self._wrap_method( self.get_location, default_timeout=None, diff --git 
a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/catalog_service/transports/rest.py b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/catalog_service/transports/rest.py index 5567f7a0b572..0d2b5af96919 100644 --- a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/catalog_service/transports/rest.py +++ b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/catalog_service/transports/rest.py @@ -105,6 +105,14 @@ def post_create_entry_group(self, response): logging.log(f"Received response: {response}") return response + def pre_create_entry_link(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_create_entry_link(self, response): + logging.log(f"Received response: {response}") + return response + def pre_create_entry_type(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata @@ -145,6 +153,14 @@ def post_delete_entry_group(self, response): logging.log(f"Received response: {response}") return response + def pre_delete_entry_link(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_delete_entry_link(self, response): + logging.log(f"Received response: {response}") + return response + def pre_delete_entry_type(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata @@ -177,6 +193,14 @@ def post_get_entry_group(self, response): logging.log(f"Received response: {response}") return response + def pre_get_entry_link(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_entry_link(self, response): + logging.log(f"Received response: {response}") + return response + def pre_get_entry_type(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata @@ -439,6 +463,50 @@ def post_create_entry_group_with_metadata( """ return 
response, metadata + def pre_create_entry_link( + self, + request: catalog.CreateEntryLinkRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[catalog.CreateEntryLinkRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for create_entry_link + + Override in a subclass to manipulate the request or metadata + before they are sent to the CatalogService server. + """ + return request, metadata + + def post_create_entry_link(self, response: catalog.EntryLink) -> catalog.EntryLink: + """Post-rpc interceptor for create_entry_link + + DEPRECATED. Please use the `post_create_entry_link_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the CatalogService server but before + it is returned to user code. This `post_create_entry_link` interceptor runs + before the `post_create_entry_link_with_metadata` interceptor. + """ + return response + + def post_create_entry_link_with_metadata( + self, + response: catalog.EntryLink, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[catalog.EntryLink, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_entry_link + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the CatalogService server but before it is returned to user code. + + We recommend only using this `post_create_entry_link_with_metadata` + interceptor in new development instead of the `post_create_entry_link` interceptor. + When both interceptors are used, this `post_create_entry_link_with_metadata` interceptor runs after the + `post_create_entry_link` interceptor. The (possibly modified) response returned by + `post_create_entry_link` will be passed to + `post_create_entry_link_with_metadata`. 
+ """ + return response, metadata + def pre_create_entry_type( self, request: catalog.CreateEntryTypeRequest, @@ -671,6 +739,50 @@ def post_delete_entry_group_with_metadata( """ return response, metadata + def pre_delete_entry_link( + self, + request: catalog.DeleteEntryLinkRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[catalog.DeleteEntryLinkRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for delete_entry_link + + Override in a subclass to manipulate the request or metadata + before they are sent to the CatalogService server. + """ + return request, metadata + + def post_delete_entry_link(self, response: catalog.EntryLink) -> catalog.EntryLink: + """Post-rpc interceptor for delete_entry_link + + DEPRECATED. Please use the `post_delete_entry_link_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the CatalogService server but before + it is returned to user code. This `post_delete_entry_link` interceptor runs + before the `post_delete_entry_link_with_metadata` interceptor. + """ + return response + + def post_delete_entry_link_with_metadata( + self, + response: catalog.EntryLink, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[catalog.EntryLink, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for delete_entry_link + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the CatalogService server but before it is returned to user code. + + We recommend only using this `post_delete_entry_link_with_metadata` + interceptor in new development instead of the `post_delete_entry_link` interceptor. + When both interceptors are used, this `post_delete_entry_link_with_metadata` interceptor runs after the + `post_delete_entry_link` interceptor. 
The (possibly modified) response returned by + `post_delete_entry_link` will be passed to + `post_delete_entry_link_with_metadata`. + """ + return response, metadata + def pre_delete_entry_type( self, request: catalog.DeleteEntryTypeRequest, @@ -847,6 +959,50 @@ def post_get_entry_group_with_metadata( """ return response, metadata + def pre_get_entry_link( + self, + request: catalog.GetEntryLinkRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[catalog.GetEntryLinkRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for get_entry_link + + Override in a subclass to manipulate the request or metadata + before they are sent to the CatalogService server. + """ + return request, metadata + + def post_get_entry_link(self, response: catalog.EntryLink) -> catalog.EntryLink: + """Post-rpc interceptor for get_entry_link + + DEPRECATED. Please use the `post_get_entry_link_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the CatalogService server but before + it is returned to user code. This `post_get_entry_link` interceptor runs + before the `post_get_entry_link_with_metadata` interceptor. + """ + return response + + def post_get_entry_link_with_metadata( + self, + response: catalog.EntryLink, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[catalog.EntryLink, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_entry_link + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the CatalogService server but before it is returned to user code. + + We recommend only using this `post_get_entry_link_with_metadata` + interceptor in new development instead of the `post_get_entry_link` interceptor. + When both interceptors are used, this `post_get_entry_link_with_metadata` interceptor runs after the + `post_get_entry_link` interceptor. 
The (possibly modified) response returned by + `post_get_entry_link` will be passed to + `post_get_entry_link_with_metadata`. + """ + return response, metadata + def pre_get_entry_type( self, request: catalog.GetEntryTypeRequest, @@ -1605,9 +1761,9 @@ class CatalogServiceRestTransport(_BaseCatalogServiceRestTransport): """REST backend synchronous transport for CatalogService. The primary resources offered by this service are - EntryGroups, EntryTypes, AspectTypes, and Entries. They - collectively let data administrators organize, manage, secure, - and catalog data located across cloud projects in their + EntryGroups, EntryTypes, AspectTypes, Entries and EntryLinks. + They collectively let data administrators organize, manage, + secure, and catalog data located across cloud projects in their organization in a variety of storage systems, including Cloud Storage and BigQuery. @@ -1735,7 +1891,7 @@ def operations_client(self) -> operations_v1.AbstractOperationsClient: }, { "method": "get", - "uri": "/v1/{name=organizations/*/locations/*/operations/*}", + "uri": "/v1/{name=organizations/*/locations/*}/operations", }, ], } @@ -2329,6 +2485,160 @@ def __call__( ) return resp + class _CreateEntryLink( + _BaseCatalogServiceRestTransport._BaseCreateEntryLink, CatalogServiceRestStub + ): + def __hash__(self): + return hash("CatalogServiceRestTransport.CreateEntryLink") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response + + def __call__( + self, + request: catalog.CreateEntryLinkRequest, + *, + retry: OptionalRetry = 
gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> catalog.EntryLink: + r"""Call the create entry link method over HTTP. + + Args: + request (~.catalog.CreateEntryLinkRequest): + The request object. Request message for CreateEntryLink. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.catalog.EntryLink: + EntryLink represents a link between + two Entries. + + """ + + http_options = ( + _BaseCatalogServiceRestTransport._BaseCreateEntryLink._get_http_options() + ) + + request, metadata = self._interceptor.pre_create_entry_link( + request, metadata + ) + transcoded_request = _BaseCatalogServiceRestTransport._BaseCreateEntryLink._get_transcoded_request( + http_options, request + ) + + body = _BaseCatalogServiceRestTransport._BaseCreateEntryLink._get_request_body_json( + transcoded_request + ) + + # Jsonify the query params + query_params = _BaseCatalogServiceRestTransport._BaseCreateEntryLink._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.dataplex_v1.CatalogServiceClient.CreateEntryLink", + extra={ + 
"serviceName": "google.cloud.dataplex.v1.CatalogService", + "rpcName": "CreateEntryLink", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = CatalogServiceRestTransport._CreateEntryLink._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = catalog.EntryLink() + pb_resp = catalog.EntryLink.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_create_entry_link(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_entry_link_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = catalog.EntryLink.to_json(response) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.dataplex_v1.CatalogServiceClient.create_entry_link", + extra={ + "serviceName": "google.cloud.dataplex.v1.CatalogService", + "rpcName": "CreateEntryLink", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + class _CreateEntryType( _BaseCatalogServiceRestTransport._BaseCreateEntryType, CatalogServiceRestStub ): @@ -2675,7 +2985,7 @@ def __call__( Args: request (~.catalog.DeleteAspectTypeRequest): - The request object. Delele AspectType Request. + The request object. Delete AspectType Request. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. 
timeout (float): The timeout for this request. @@ -3076,6 +3386,154 @@ def __call__( ) return resp + class _DeleteEntryLink( + _BaseCatalogServiceRestTransport._BaseDeleteEntryLink, CatalogServiceRestStub + ): + def __hash__(self): + return hash("CatalogServiceRestTransport.DeleteEntryLink") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__( + self, + request: catalog.DeleteEntryLinkRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> catalog.EntryLink: + r"""Call the delete entry link method over HTTP. + + Args: + request (~.catalog.DeleteEntryLinkRequest): + The request object. Request message for DeleteEntryLink. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.catalog.EntryLink: + EntryLink represents a link between + two Entries. 
+ + """ + + http_options = ( + _BaseCatalogServiceRestTransport._BaseDeleteEntryLink._get_http_options() + ) + + request, metadata = self._interceptor.pre_delete_entry_link( + request, metadata + ) + transcoded_request = _BaseCatalogServiceRestTransport._BaseDeleteEntryLink._get_transcoded_request( + http_options, request + ) + + # Jsonify the query params + query_params = _BaseCatalogServiceRestTransport._BaseDeleteEntryLink._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.dataplex_v1.CatalogServiceClient.DeleteEntryLink", + extra={ + "serviceName": "google.cloud.dataplex.v1.CatalogService", + "rpcName": "DeleteEntryLink", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = CatalogServiceRestTransport._DeleteEntryLink._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = catalog.EntryLink() + pb_resp = catalog.EntryLink.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_delete_entry_link(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_delete_entry_link_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = catalog.EntryLink.to_json(response) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.dataplex_v1.CatalogServiceClient.delete_entry_link", + extra={ + "serviceName": "google.cloud.dataplex.v1.CatalogService", + "rpcName": "DeleteEntryLink", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + class _DeleteEntryType( _BaseCatalogServiceRestTransport._BaseDeleteEntryType, CatalogServiceRestStub ): @@ -3116,7 +3574,7 @@ def __call__( Args: request (~.catalog.DeleteEntryTypeRequest): - The request object. Delele EntryType Request. + The request object. Delete EntryType Request. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. 
@@ -3668,6 +4126,152 @@ def __call__( ) return resp + class _GetEntryLink( + _BaseCatalogServiceRestTransport._BaseGetEntryLink, CatalogServiceRestStub + ): + def __hash__(self): + return hash("CatalogServiceRestTransport.GetEntryLink") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__( + self, + request: catalog.GetEntryLinkRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> catalog.EntryLink: + r"""Call the get entry link method over HTTP. + + Args: + request (~.catalog.GetEntryLinkRequest): + The request object. Request message for GetEntryLink. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.catalog.EntryLink: + EntryLink represents a link between + two Entries. 
+ + """ + + http_options = ( + _BaseCatalogServiceRestTransport._BaseGetEntryLink._get_http_options() + ) + + request, metadata = self._interceptor.pre_get_entry_link(request, metadata) + transcoded_request = _BaseCatalogServiceRestTransport._BaseGetEntryLink._get_transcoded_request( + http_options, request + ) + + # Jsonify the query params + query_params = _BaseCatalogServiceRestTransport._BaseGetEntryLink._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.dataplex_v1.CatalogServiceClient.GetEntryLink", + extra={ + "serviceName": "google.cloud.dataplex.v1.CatalogService", + "rpcName": "GetEntryLink", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = CatalogServiceRestTransport._GetEntryLink._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = catalog.EntryLink() + pb_resp = catalog.EntryLink.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_get_entry_link(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_entry_link_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = catalog.EntryLink.to_json(response) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.dataplex_v1.CatalogServiceClient.get_entry_link", + extra={ + "serviceName": "google.cloud.dataplex.v1.CatalogService", + "rpcName": "GetEntryLink", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + class _GetEntryType( _BaseCatalogServiceRestTransport._BaseGetEntryType, CatalogServiceRestStub ): @@ -5624,6 +6228,14 @@ def create_entry_group( # In C++ this would require a dynamic_cast return self._CreateEntryGroup(self._session, self._host, self._interceptor) # type: ignore + @property + def create_entry_link( + self, + ) -> Callable[[catalog.CreateEntryLinkRequest], catalog.EntryLink]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._CreateEntryLink(self._session, self._host, self._interceptor) # type: ignore + @property def create_entry_type( self, @@ -5662,6 +6274,14 @@ def delete_entry_group( # In C++ this would require a dynamic_cast return self._DeleteEntryGroup(self._session, self._host, self._interceptor) # type: ignore + @property + def delete_entry_link( + self, + ) -> Callable[[catalog.DeleteEntryLinkRequest], catalog.EntryLink]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._DeleteEntryLink(self._session, self._host, self._interceptor) # type: ignore + @property def delete_entry_type( self, @@ -5692,6 +6312,14 @@ def get_entry_group( # In C++ this would require a dynamic_cast return self._GetEntryGroup(self._session, self._host, self._interceptor) # type: ignore + @property + def get_entry_link( + self, + ) -> Callable[[catalog.GetEntryLinkRequest], catalog.EntryLink]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._GetEntryLink(self._session, self._host, self._interceptor) # type: ignore + @property def get_entry_type( self, diff --git a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/catalog_service/transports/rest_base.py b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/catalog_service/transports/rest_base.py index 08197e264a59..52abebb66f4a 100644 --- a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/catalog_service/transports/rest_base.py +++ b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/catalog_service/transports/rest_base.py @@ -326,6 +326,65 @@ def _get_query_params_json(transcoded_request): query_params["$alt"] = "json;enum-encoding=int" return query_params + class _BaseCreateEntryLink: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + "entryLinkId": "", + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{parent=projects/*/locations/*/entryGroups/*}/entryLinks", + "body": "entry_link", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = catalog.CreateEntryLinkRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + return body + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + 
transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BaseCatalogServiceRestTransport._BaseCreateEntryLink._get_unset_required_fields( + query_params + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + class _BaseCreateEntryType: def __hash__(self): # pragma: NO COVER return NotImplementedError("__hash__ must be implemented.") @@ -583,6 +642,53 @@ def _get_query_params_json(transcoded_request): query_params["$alt"] = "json;enum-encoding=int" return query_params + class _BaseDeleteEntryLink: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "delete", + "uri": "/v1/{name=projects/*/locations/*/entryGroups/*/entryLinks/*}", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = catalog.DeleteEntryLinkRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BaseCatalogServiceRestTransport._BaseDeleteEntryLink._get_unset_required_fields( + query_params + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + class _BaseDeleteEntryType: def __hash__(self): # pragma: NO COVER return NotImplementedError("__hash__ must be implemented.") @@ -771,6 +877,53 @@ def _get_query_params_json(transcoded_request): query_params["$alt"] = 
"json;enum-encoding=int" return query_params + class _BaseGetEntryLink: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/entryGroups/*/entryLinks/*}", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = catalog.GetEntryLinkRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BaseCatalogServiceRestTransport._BaseGetEntryLink._get_unset_required_fields( + query_params + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + class _BaseGetEntryType: def __hash__(self): # pragma: NO COVER return NotImplementedError("__hash__ must be implemented.") @@ -1589,7 +1742,7 @@ def _get_http_options(): }, { "method": "get", - "uri": "/v1/{name=organizations/*/locations/*/operations/*}", + "uri": "/v1/{name=organizations/*/locations/*}/operations", }, ] return http_options diff --git a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/cmek_service/async_client.py b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/cmek_service/async_client.py index fb9385f91bd1..76c7d432686f 100644 --- a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/cmek_service/async_client.py +++ 
b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/cmek_service/async_client.py @@ -72,7 +72,9 @@ class CmekServiceAsyncClient: - """Dataplex Cmek Service""" + """Dataplex Universal Catalog Customer Managed Encryption Keys + (CMEK) Service + """ _client: CmekServiceClient diff --git a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/cmek_service/client.py b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/cmek_service/client.py index e326c3747c13..a7ba321eb8db 100644 --- a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/cmek_service/client.py +++ b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/cmek_service/client.py @@ -116,7 +116,9 @@ def get_transport_class( class CmekServiceClient(metaclass=CmekServiceClientMeta): - """Dataplex Cmek Service""" + """Dataplex Universal Catalog Customer Managed Encryption Keys + (CMEK) Service + """ @staticmethod def _get_default_mtls_endpoint(api_endpoint): diff --git a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/cmek_service/transports/grpc.py b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/cmek_service/transports/grpc.py index aef104f8dd51..8c33cc7ba381 100644 --- a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/cmek_service/transports/grpc.py +++ b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/cmek_service/transports/grpc.py @@ -114,7 +114,8 @@ def intercept_unary_unary(self, continuation, client_call_details, request): class CmekServiceGrpcTransport(CmekServiceTransport): """gRPC backend transport for CmekService. 
- Dataplex Cmek Service + Dataplex Universal Catalog Customer Managed Encryption Keys + (CMEK) Service This class defines the same methods as the primary client, so the primary client can load the underlying transport implementation diff --git a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/cmek_service/transports/grpc_asyncio.py b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/cmek_service/transports/grpc_asyncio.py index 8f5ab10967a7..8bcd3af4b6a8 100644 --- a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/cmek_service/transports/grpc_asyncio.py +++ b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/cmek_service/transports/grpc_asyncio.py @@ -120,7 +120,8 @@ async def intercept_unary_unary(self, continuation, client_call_details, request class CmekServiceGrpcAsyncIOTransport(CmekServiceTransport): """gRPC AsyncIO backend transport for CmekService. - Dataplex Cmek Service + Dataplex Universal Catalog Customer Managed Encryption Keys + (CMEK) Service This class defines the same methods as the primary client, so the primary client can load the underlying transport implementation diff --git a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/cmek_service/transports/rest.py b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/cmek_service/transports/rest.py index 6ad765489106..dd50fbebd7af 100644 --- a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/cmek_service/transports/rest.py +++ b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/cmek_service/transports/rest.py @@ -521,7 +521,8 @@ class CmekServiceRestStub: class CmekServiceRestTransport(_BaseCmekServiceRestTransport): """REST backend synchronous transport for CmekService. 
- Dataplex Cmek Service + Dataplex Universal Catalog Customer Managed Encryption Keys + (CMEK) Service This class defines the same methods as the primary client, so the primary client can load the underlying transport implementation @@ -647,7 +648,7 @@ def operations_client(self) -> operations_v1.AbstractOperationsClient: }, { "method": "get", - "uri": "/v1/{name=organizations/*/locations/*/operations/*}", + "uri": "/v1/{name=organizations/*/locations/*}/operations", }, ], } diff --git a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/cmek_service/transports/rest_base.py b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/cmek_service/transports/rest_base.py index c4f258b5c33f..2e54082df7f3 100644 --- a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/cmek_service/transports/rest_base.py +++ b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/cmek_service/transports/rest_base.py @@ -505,7 +505,7 @@ def _get_http_options(): }, { "method": "get", - "uri": "/v1/{name=organizations/*/locations/*/operations/*}", + "uri": "/v1/{name=organizations/*/locations/*}/operations", }, ] return http_options diff --git a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/content_service/async_client.py b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/content_service/async_client.py index 512d8a8762c8..a7f0ccd1286b 100644 --- a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/content_service/async_client.py +++ b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/content_service/async_client.py @@ -71,7 +71,9 @@ class ContentServiceAsyncClient: - """ContentService manages Notebook and SQL Scripts for Dataplex.""" + """ContentService manages Notebook and SQL Scripts for Dataplex + Universal Catalog. 
+ """ _client: ContentServiceClient diff --git a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/content_service/client.py b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/content_service/client.py index 0ca7c45802f7..9b8f553b9c29 100644 --- a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/content_service/client.py +++ b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/content_service/client.py @@ -117,7 +117,9 @@ def get_transport_class( class ContentServiceClient(metaclass=ContentServiceClientMeta): - """ContentService manages Notebook and SQL Scripts for Dataplex.""" + """ContentService manages Notebook and SQL Scripts for Dataplex + Universal Catalog. + """ @staticmethod def _get_default_mtls_endpoint(api_endpoint): diff --git a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/content_service/transports/grpc.py b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/content_service/transports/grpc.py index 155db682d69e..af860495abee 100644 --- a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/content_service/transports/grpc.py +++ b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/content_service/transports/grpc.py @@ -117,7 +117,8 @@ def intercept_unary_unary(self, continuation, client_call_details, request): class ContentServiceGrpcTransport(ContentServiceTransport): """gRPC backend transport for ContentService. - ContentService manages Notebook and SQL Scripts for Dataplex. + ContentService manages Notebook and SQL Scripts for Dataplex + Universal Catalog. 
This class defines the same methods as the primary client, so the primary client can load the underlying transport implementation diff --git a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/content_service/transports/grpc_asyncio.py b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/content_service/transports/grpc_asyncio.py index a5efa74a385b..d1c11078c720 100644 --- a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/content_service/transports/grpc_asyncio.py +++ b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/content_service/transports/grpc_asyncio.py @@ -123,7 +123,8 @@ async def intercept_unary_unary(self, continuation, client_call_details, request class ContentServiceGrpcAsyncIOTransport(ContentServiceTransport): """gRPC AsyncIO backend transport for ContentService. - ContentService manages Notebook and SQL Scripts for Dataplex. + ContentService manages Notebook and SQL Scripts for Dataplex + Universal Catalog. This class defines the same methods as the primary client, so the primary client can load the underlying transport implementation diff --git a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/content_service/transports/rest.py b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/content_service/transports/rest.py index 5e0ad44c6dda..fda9c44cde55 100644 --- a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/content_service/transports/rest.py +++ b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/content_service/transports/rest.py @@ -640,7 +640,8 @@ class ContentServiceRestStub: class ContentServiceRestTransport(_BaseContentServiceRestTransport): """REST backend synchronous transport for ContentService. - ContentService manages Notebook and SQL Scripts for Dataplex. + ContentService manages Notebook and SQL Scripts for Dataplex + Universal Catalog. 
This class defines the same methods as the primary client, so the primary client can load the underlying transport implementation diff --git a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/content_service/transports/rest_base.py b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/content_service/transports/rest_base.py index 8101f1f2e67b..7543a0fb9ae8 100644 --- a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/content_service/transports/rest_base.py +++ b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/content_service/transports/rest_base.py @@ -705,7 +705,7 @@ def _get_http_options(): }, { "method": "get", - "uri": "/v1/{name=organizations/*/locations/*/operations/*}", + "uri": "/v1/{name=organizations/*/locations/*}/operations", }, ] return http_options diff --git a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/data_scan_service/async_client.py b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/data_scan_service/async_client.py index b1122b6a83a3..3bea356be2aa 100644 --- a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/data_scan_service/async_client.py +++ b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/data_scan_service/async_client.py @@ -377,7 +377,7 @@ async def sample_create_data_scan(): Required. The resource name of the parent location: ``projects/{project}/locations/{location_id}`` where ``project`` refers to a *project_id* or *project_number* - and ``location_id`` refers to a GCP region. + and ``location_id`` refers to a Google Cloud region. This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this @@ -702,8 +702,8 @@ async def sample_delete_data_scan(): Required. 
The resource name of the dataScan: ``projects/{project}/locations/{location_id}/dataScans/{data_scan_id}`` where ``project`` refers to a *project_id* or - *project_number* and ``location_id`` refers to a GCP - region. + *project_number* and ``location_id`` refers to a Google + Cloud region. This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this @@ -833,8 +833,8 @@ async def sample_get_data_scan(): Required. The resource name of the dataScan: ``projects/{project}/locations/{location_id}/dataScans/{data_scan_id}`` where ``project`` refers to a *project_id* or - *project_number* and ``location_id`` refers to a GCP - region. + *project_number* and ``location_id`` refers to a Google + Cloud region. This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this @@ -966,7 +966,7 @@ async def sample_list_data_scans(): Required. The resource name of the parent location: ``projects/{project}/locations/{location_id}`` where ``project`` refers to a *project_id* or *project_number* - and ``location_id`` refers to a GCP region. + and ``location_id`` refers to a Google Cloud region. This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this @@ -1092,8 +1092,8 @@ async def sample_run_data_scan(): Required. The resource name of the DataScan: ``projects/{project}/locations/{location_id}/dataScans/{data_scan_id}``. where ``project`` refers to a *project_id* or - *project_number* and ``location_id`` refers to a GCP - region. + *project_number* and ``location_id`` refers to a Google + Cloud region. Only **OnDemand** data scans are allowed. @@ -1205,8 +1205,8 @@ async def sample_get_data_scan_job(): Required. The resource name of the DataScanJob: ``projects/{project}/locations/{location_id}/dataScans/{data_scan_id}/jobs/{data_scan_job_id}`` where ``project`` refers to a *project_id* or - *project_number* and ``location_id`` refers to a GCP - region. 
+ *project_number* and ``location_id`` refers to a Google + Cloud region. This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this @@ -1319,8 +1319,8 @@ async def sample_list_data_scan_jobs(): Required. The resource name of the parent environment: ``projects/{project}/locations/{location_id}/dataScans/{data_scan_id}`` where ``project`` refers to a *project_id* or - *project_number* and ``location_id`` refers to a GCP - region. + *project_number* and ``location_id`` refers to a Google + Cloud region. This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this diff --git a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/data_scan_service/client.py b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/data_scan_service/client.py index ec713fafb500..ab1a281ce8e8 100644 --- a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/data_scan_service/client.py +++ b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/data_scan_service/client.py @@ -881,7 +881,7 @@ def sample_create_data_scan(): Required. The resource name of the parent location: ``projects/{project}/locations/{location_id}`` where ``project`` refers to a *project_id* or *project_number* - and ``location_id`` refers to a GCP region. + and ``location_id`` refers to a Google Cloud region. This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this @@ -1200,8 +1200,8 @@ def sample_delete_data_scan(): Required. The resource name of the dataScan: ``projects/{project}/locations/{location_id}/dataScans/{data_scan_id}`` where ``project`` refers to a *project_id* or - *project_number* and ``location_id`` refers to a GCP - region. + *project_number* and ``location_id`` refers to a Google + Cloud region. 
This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this @@ -1328,8 +1328,8 @@ def sample_get_data_scan(): Required. The resource name of the dataScan: ``projects/{project}/locations/{location_id}/dataScans/{data_scan_id}`` where ``project`` refers to a *project_id* or - *project_number* and ``location_id`` refers to a GCP - region. + *project_number* and ``location_id`` refers to a Google + Cloud region. This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this @@ -1458,7 +1458,7 @@ def sample_list_data_scans(): Required. The resource name of the parent location: ``projects/{project}/locations/{location_id}`` where ``project`` refers to a *project_id* or *project_number* - and ``location_id`` refers to a GCP region. + and ``location_id`` refers to a Google Cloud region. This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this @@ -1581,8 +1581,8 @@ def sample_run_data_scan(): Required. The resource name of the DataScan: ``projects/{project}/locations/{location_id}/dataScans/{data_scan_id}``. where ``project`` refers to a *project_id* or - *project_number* and ``location_id`` refers to a GCP - region. + *project_number* and ``location_id`` refers to a Google + Cloud region. Only **OnDemand** data scans are allowed. @@ -1691,8 +1691,8 @@ def sample_get_data_scan_job(): Required. The resource name of the DataScanJob: ``projects/{project}/locations/{location_id}/dataScans/{data_scan_id}/jobs/{data_scan_job_id}`` where ``project`` refers to a *project_id* or - *project_number* and ``location_id`` refers to a GCP - region. + *project_number* and ``location_id`` refers to a Google + Cloud region. This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this @@ -1802,8 +1802,8 @@ def sample_list_data_scan_jobs(): Required. 
The resource name of the parent environment: ``projects/{project}/locations/{location_id}/dataScans/{data_scan_id}`` where ``project`` refers to a *project_id* or - *project_number* and ``location_id`` refers to a GCP - region. + *project_number* and ``location_id`` refers to a Google + Cloud region. This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this diff --git a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/data_scan_service/transports/rest.py b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/data_scan_service/transports/rest.py index 0d1fa658b5af..61f6cf277ce8 100644 --- a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/data_scan_service/transports/rest.py +++ b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/data_scan_service/transports/rest.py @@ -872,7 +872,7 @@ def operations_client(self) -> operations_v1.AbstractOperationsClient: }, { "method": "get", - "uri": "/v1/{name=organizations/*/locations/*/operations/*}", + "uri": "/v1/{name=organizations/*/locations/*}/operations", }, ], } diff --git a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/data_scan_service/transports/rest_base.py b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/data_scan_service/transports/rest_base.py index 5e859cc8d6c2..96597b805754 100644 --- a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/data_scan_service/transports/rest_base.py +++ b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/data_scan_service/transports/rest_base.py @@ -718,7 +718,7 @@ def _get_http_options(): }, { "method": "get", - "uri": "/v1/{name=organizations/*/locations/*/operations/*}", + "uri": "/v1/{name=organizations/*/locations/*}/operations", }, ] return http_options diff --git a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/data_taxonomy_service/async_client.py 
b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/data_taxonomy_service/async_client.py index 032cca13e66a..641733050837 100644 --- a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/data_taxonomy_service/async_client.py +++ b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/data_taxonomy_service/async_client.py @@ -802,7 +802,7 @@ async def sample_list_data_taxonomies(): Required. The resource name of the DataTaxonomy location, of the form: projects/{project_number}/locations/{location_id} where - ``location_id`` refers to a GCP region. + ``location_id`` refers to a Google Cloud region. This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this diff --git a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/data_taxonomy_service/client.py b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/data_taxonomy_service/client.py index 2691daa384cb..6082f72ab552 100644 --- a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/data_taxonomy_service/client.py +++ b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/data_taxonomy_service/client.py @@ -1250,7 +1250,7 @@ def sample_list_data_taxonomies(): Required. The resource name of the DataTaxonomy location, of the form: projects/{project_number}/locations/{location_id} where - ``location_id`` refers to a GCP region. + ``location_id`` refers to a Google Cloud region. 
This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this diff --git a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/data_taxonomy_service/transports/rest.py b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/data_taxonomy_service/transports/rest.py index 256f5e7e1828..015996d4fc74 100644 --- a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/data_taxonomy_service/transports/rest.py +++ b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/data_taxonomy_service/transports/rest.py @@ -1229,7 +1229,7 @@ def operations_client(self) -> operations_v1.AbstractOperationsClient: }, { "method": "get", - "uri": "/v1/{name=organizations/*/locations/*/operations/*}", + "uri": "/v1/{name=organizations/*/locations/*}/operations", }, ], } diff --git a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/data_taxonomy_service/transports/rest_base.py b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/data_taxonomy_service/transports/rest_base.py index 5c930ba39065..423e13010b14 100644 --- a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/data_taxonomy_service/transports/rest_base.py +++ b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/data_taxonomy_service/transports/rest_base.py @@ -1028,7 +1028,7 @@ def _get_http_options(): }, { "method": "get", - "uri": "/v1/{name=organizations/*/locations/*/operations/*}", + "uri": "/v1/{name=organizations/*/locations/*}/operations", }, ] return http_options diff --git a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/dataplex_service/async_client.py b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/dataplex_service/async_client.py index fdc921e1142c..cd442b454512 100644 --- a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/dataplex_service/async_client.py +++ 
b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/dataplex_service/async_client.py @@ -369,7 +369,7 @@ async def sample_create_lake(): parent (:class:`str`): Required. The resource name of the lake location, of the form: projects/{project_number}/locations/{location_id} - where ``location_id`` refers to a GCP region. + where ``location_id`` refers to a Google Cloud region. This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this @@ -802,7 +802,7 @@ async def sample_list_lakes(): Required. The resource name of the lake location, of the form: ``projects/{project_number}/locations/{location_id}`` - where ``location_id`` refers to a GCP region. + where ``location_id`` refers to a Google Cloud region. This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this diff --git a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/dataplex_service/client.py b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/dataplex_service/client.py index 6f8f6e5c4eb5..39bfaead92e7 100644 --- a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/dataplex_service/client.py +++ b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/dataplex_service/client.py @@ -956,7 +956,7 @@ def sample_create_lake(): parent (str): Required. The resource name of the lake location, of the form: projects/{project_number}/locations/{location_id} - where ``location_id`` refers to a GCP region. + where ``location_id`` refers to a Google Cloud region. This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this @@ -1380,7 +1380,7 @@ def sample_list_lakes(): Required. The resource name of the lake location, of the form: ``projects/{project_number}/locations/{location_id}`` - where ``location_id`` refers to a GCP region. + where ``location_id`` refers to a Google Cloud region. 
This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this diff --git a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/dataplex_service/transports/rest.py b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/dataplex_service/transports/rest.py index 0dc13af97524..0e083f5373c2 100644 --- a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/dataplex_service/transports/rest.py +++ b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/dataplex_service/transports/rest.py @@ -2113,7 +2113,7 @@ def operations_client(self) -> operations_v1.AbstractOperationsClient: }, { "method": "get", - "uri": "/v1/{name=organizations/*/locations/*/operations/*}", + "uri": "/v1/{name=organizations/*/locations/*}/operations", }, ], } diff --git a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/dataplex_service/transports/rest_base.py b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/dataplex_service/transports/rest_base.py index 8f41186c5000..6839dc53bd63 100644 --- a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/dataplex_service/transports/rest_base.py +++ b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/dataplex_service/transports/rest_base.py @@ -1940,7 +1940,7 @@ def _get_http_options(): }, { "method": "get", - "uri": "/v1/{name=organizations/*/locations/*/operations/*}", + "uri": "/v1/{name=organizations/*/locations/*}/operations", }, ] return http_options diff --git a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/metadata_service/transports/rest_base.py b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/metadata_service/transports/rest_base.py index 8815f1509c1c..5850fd61fa1b 100644 --- a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/metadata_service/transports/rest_base.py +++ 
b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/services/metadata_service/transports/rest_base.py @@ -706,7 +706,7 @@ def _get_http_options(): }, { "method": "get", - "uri": "/v1/{name=organizations/*/locations/*/operations/*}", + "uri": "/v1/{name=organizations/*/locations/*}/operations", }, ] return http_options diff --git a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/types/__init__.py b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/types/__init__.py index 095590f45d4e..b33e533b219c 100644 --- a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/types/__init__.py +++ b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/types/__init__.py @@ -14,6 +14,29 @@ # limitations under the License. # from .analyze import Content, Environment, Session +from .business_glossary import ( + CreateGlossaryCategoryRequest, + CreateGlossaryRequest, + CreateGlossaryTermRequest, + DeleteGlossaryCategoryRequest, + DeleteGlossaryRequest, + DeleteGlossaryTermRequest, + GetGlossaryCategoryRequest, + GetGlossaryRequest, + GetGlossaryTermRequest, + Glossary, + GlossaryCategory, + GlossaryTerm, + ListGlossariesRequest, + ListGlossariesResponse, + ListGlossaryCategoriesRequest, + ListGlossaryCategoriesResponse, + ListGlossaryTermsRequest, + ListGlossaryTermsResponse, + UpdateGlossaryCategoryRequest, + UpdateGlossaryRequest, + UpdateGlossaryTermRequest, +) from .catalog import ( Aspect, AspectSource, @@ -21,20 +44,24 @@ CancelMetadataJobRequest, CreateAspectTypeRequest, CreateEntryGroupRequest, + CreateEntryLinkRequest, CreateEntryRequest, CreateEntryTypeRequest, CreateMetadataJobRequest, DeleteAspectTypeRequest, DeleteEntryGroupRequest, + DeleteEntryLinkRequest, DeleteEntryRequest, DeleteEntryTypeRequest, Entry, EntryGroup, + EntryLink, EntrySource, EntryType, EntryView, GetAspectTypeRequest, GetEntryGroupRequest, + GetEntryLinkRequest, GetEntryRequest, GetEntryTypeRequest, GetMetadataJobRequest, @@ -129,6 +156,7 @@ RunDataScanResponse, 
UpdateDataScanRequest, ) +from .datascans_common import DataScanCatalogPublishingStatus from .logs import ( BusinessGlossaryEvent, DataQualityScanRuleResult, @@ -212,25 +240,50 @@ "Content", "Environment", "Session", + "CreateGlossaryCategoryRequest", + "CreateGlossaryRequest", + "CreateGlossaryTermRequest", + "DeleteGlossaryCategoryRequest", + "DeleteGlossaryRequest", + "DeleteGlossaryTermRequest", + "GetGlossaryCategoryRequest", + "GetGlossaryRequest", + "GetGlossaryTermRequest", + "Glossary", + "GlossaryCategory", + "GlossaryTerm", + "ListGlossariesRequest", + "ListGlossariesResponse", + "ListGlossaryCategoriesRequest", + "ListGlossaryCategoriesResponse", + "ListGlossaryTermsRequest", + "ListGlossaryTermsResponse", + "UpdateGlossaryCategoryRequest", + "UpdateGlossaryRequest", + "UpdateGlossaryTermRequest", "Aspect", "AspectSource", "AspectType", "CancelMetadataJobRequest", "CreateAspectTypeRequest", "CreateEntryGroupRequest", + "CreateEntryLinkRequest", "CreateEntryRequest", "CreateEntryTypeRequest", "CreateMetadataJobRequest", "DeleteAspectTypeRequest", "DeleteEntryGroupRequest", + "DeleteEntryLinkRequest", "DeleteEntryRequest", "DeleteEntryTypeRequest", "Entry", "EntryGroup", + "EntryLink", "EntrySource", "EntryType", "GetAspectTypeRequest", "GetEntryGroupRequest", + "GetEntryLinkRequest", "GetEntryRequest", "GetEntryTypeRequest", "GetMetadataJobRequest", @@ -317,6 +370,7 @@ "RunDataScanResponse", "UpdateDataScanRequest", "DataScanType", + "DataScanCatalogPublishingStatus", "BusinessGlossaryEvent", "DataQualityScanRuleResult", "DataScanEvent", diff --git a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/types/analyze.py b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/types/analyze.py index 8401a158be6d..540dffe6b381 100644 --- a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/types/analyze.py +++ b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/types/analyze.py @@ -130,7 +130,8 @@ class OsImageRuntime(proto.Message): 
Attributes: image_version (str): - Required. Dataplex Image version. + Required. Dataplex Universal Catalog Image + version. java_libraries (MutableSequence[str]): Optional. List of Java jars to be included in the runtime environment. Valid input includes diff --git a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/types/business_glossary.py b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/types/business_glossary.py new file mode 100644 index 000000000000..199eea0392af --- /dev/null +++ b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/types/business_glossary.py @@ -0,0 +1,868 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore +import proto # type: ignore + +__protobuf__ = proto.module( + package="google.cloud.dataplex.v1", + manifest={ + "Glossary", + "GlossaryCategory", + "GlossaryTerm", + "CreateGlossaryRequest", + "UpdateGlossaryRequest", + "DeleteGlossaryRequest", + "GetGlossaryRequest", + "ListGlossariesRequest", + "ListGlossariesResponse", + "CreateGlossaryCategoryRequest", + "UpdateGlossaryCategoryRequest", + "DeleteGlossaryCategoryRequest", + "GetGlossaryCategoryRequest", + "ListGlossaryCategoriesRequest", + "ListGlossaryCategoriesResponse", + "CreateGlossaryTermRequest", + "UpdateGlossaryTermRequest", + "DeleteGlossaryTermRequest", + "GetGlossaryTermRequest", + "ListGlossaryTermsRequest", + "ListGlossaryTermsResponse", + }, +) + + +class Glossary(proto.Message): + r"""A Glossary represents a collection of GlossaryCategories and + GlossaryTerms defined by the user. Glossary is a top level + resource and is the Google Cloud parent resource of all the + GlossaryCategories and GlossaryTerms within it. + + Attributes: + name (str): + Output only. Identifier. The resource name of the Glossary. + Format: + projects/{project_id_or_number}/locations/{location_id}/glossaries/{glossary_id} + uid (str): + Output only. System generated unique id for + the Glossary. This ID will be different if the + Glossary is deleted and re-created with the same + name. + display_name (str): + Optional. User friendly display name of the + Glossary. This is user-mutable. This will be + same as the GlossaryId, if not specified. + description (str): + Optional. The user-mutable description of the + Glossary. + create_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The time at which the Glossary + was created. + update_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. 
The time at which the Glossary + was last updated. + labels (MutableMapping[str, str]): + Optional. User-defined labels for the + Glossary. + term_count (int): + Output only. The number of GlossaryTerms in + the Glossary. + category_count (int): + Output only. The number of GlossaryCategories + in the Glossary. + etag (str): + Optional. Needed for resource freshness + validation. This checksum is computed by the + server based on the value of other fields, and + may be sent on update and delete requests to + ensure the client has an up-to-date value before + proceeding. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + uid: str = proto.Field( + proto.STRING, + number=2, + ) + display_name: str = proto.Field( + proto.STRING, + number=3, + ) + description: str = proto.Field( + proto.STRING, + number=4, + ) + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=5, + message=timestamp_pb2.Timestamp, + ) + update_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=6, + message=timestamp_pb2.Timestamp, + ) + labels: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=7, + ) + term_count: int = proto.Field( + proto.INT32, + number=8, + ) + category_count: int = proto.Field( + proto.INT32, + number=9, + ) + etag: str = proto.Field( + proto.STRING, + number=10, + ) + + +class GlossaryCategory(proto.Message): + r"""A GlossaryCategory represents a collection of + GlossaryCategories and GlossaryTerms within a Glossary that are + related to each other. + + Attributes: + name (str): + Output only. Identifier. The resource name of the + GlossaryCategory. Format: + projects/{project_id_or_number}/locations/{location_id}/glossaries/{glossary_id}/categories/{category_id} + uid (str): + Output only. System generated unique id for + the GlossaryCategory. This ID will be different + if the GlossaryCategory is deleted and + re-created with the same name. + display_name (str): + Optional. 
User friendly display name of the + GlossaryCategory. This is user-mutable. This + will be same as the GlossaryCategoryId, if not + specified. + description (str): + Optional. The user-mutable description of the + GlossaryCategory. + create_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The time at which the + GlossaryCategory was created. + update_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The time at which the + GlossaryCategory was last updated. + labels (MutableMapping[str, str]): + Optional. User-defined labels for the + GlossaryCategory. + parent (str): + Required. The immediate parent of the GlossaryCategory in + the resource-hierarchy. It can either be a Glossary or a + GlossaryCategory. Format: + projects/{project_id_or_number}/locations/{location_id}/glossaries/{glossary_id} + OR + projects/{project_id_or_number}/locations/{location_id}/glossaries/{glossary_id}/categories/{category_id} + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + uid: str = proto.Field( + proto.STRING, + number=2, + ) + display_name: str = proto.Field( + proto.STRING, + number=3, + ) + description: str = proto.Field( + proto.STRING, + number=4, + ) + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=5, + message=timestamp_pb2.Timestamp, + ) + update_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=6, + message=timestamp_pb2.Timestamp, + ) + labels: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=7, + ) + parent: str = proto.Field( + proto.STRING, + number=8, + ) + + +class GlossaryTerm(proto.Message): + r"""GlossaryTerms are the core of Glossary. + A GlossaryTerm holds a rich text description that can be + attached to Entries or specific columns to enrich them. + + Attributes: + name (str): + Output only. Identifier. The resource name of the + GlossaryTerm. 
Format: + projects/{project_id_or_number}/locations/{location_id}/glossaries/{glossary_id}/terms/{term_id} + uid (str): + Output only. System generated unique id for + the GlossaryTerm. This ID will be different if + the GlossaryTerm is deleted and re-created with + the same name. + display_name (str): + Optional. User friendly display name of the + GlossaryTerm. This is user-mutable. This will be + same as the GlossaryTermId, if not specified. + description (str): + Optional. The user-mutable description of the + GlossaryTerm. + create_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The time at which the + GlossaryTerm was created. + update_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The time at which the + GlossaryTerm was last updated. + labels (MutableMapping[str, str]): + Optional. User-defined labels for the + GlossaryTerm. + parent (str): + Required. The immediate parent of the GlossaryTerm in the + resource-hierarchy. It can either be a Glossary or a + GlossaryCategory. 
Format: + projects/{project_id_or_number}/locations/{location_id}/glossaries/{glossary_id} + OR + projects/{project_id_or_number}/locations/{location_id}/glossaries/{glossary_id}/categories/{category_id} + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + uid: str = proto.Field( + proto.STRING, + number=2, + ) + display_name: str = proto.Field( + proto.STRING, + number=3, + ) + description: str = proto.Field( + proto.STRING, + number=4, + ) + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=5, + message=timestamp_pb2.Timestamp, + ) + update_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=6, + message=timestamp_pb2.Timestamp, + ) + labels: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=7, + ) + parent: str = proto.Field( + proto.STRING, + number=8, + ) + + +class CreateGlossaryRequest(proto.Message): + r"""Create Glossary Request + + Attributes: + parent (str): + Required. The parent resource where this Glossary will be + created. Format: + projects/{project_id_or_number}/locations/{location_id} + where ``location_id`` refers to a Google Cloud region. + glossary_id (str): + Required. Glossary ID: Glossary identifier. + glossary (google.cloud.dataplex_v1.types.Glossary): + Required. The Glossary to create. + validate_only (bool): + Optional. Validates the request without + actually creating the Glossary. Default: false. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + glossary_id: str = proto.Field( + proto.STRING, + number=2, + ) + glossary: "Glossary" = proto.Field( + proto.MESSAGE, + number=3, + message="Glossary", + ) + validate_only: bool = proto.Field( + proto.BOOL, + number=4, + ) + + +class UpdateGlossaryRequest(proto.Message): + r"""Update Glossary Request + + Attributes: + glossary (google.cloud.dataplex_v1.types.Glossary): + Required. The Glossary to update. 
The Glossary's ``name`` + field is used to identify the Glossary to update. Format: + projects/{project_id_or_number}/locations/{location_id}/glossaries/{glossary_id} + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Required. The list of fields to update. + validate_only (bool): + Optional. Validates the request without + actually updating the Glossary. Default: false. + """ + + glossary: "Glossary" = proto.Field( + proto.MESSAGE, + number=1, + message="Glossary", + ) + update_mask: field_mask_pb2.FieldMask = proto.Field( + proto.MESSAGE, + number=2, + message=field_mask_pb2.FieldMask, + ) + validate_only: bool = proto.Field( + proto.BOOL, + number=3, + ) + + +class DeleteGlossaryRequest(proto.Message): + r"""Delete Glossary Request + + Attributes: + name (str): + Required. The name of the Glossary to delete. Format: + projects/{project_id_or_number}/locations/{location_id}/glossaries/{glossary_id} + etag (str): + Optional. The etag of the Glossary. + If this is provided, it must match the server's + etag. If the etag is provided and does not match + the server-computed etag, the request must fail + with an ABORTED error code. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + etag: str = proto.Field( + proto.STRING, + number=2, + ) + + +class GetGlossaryRequest(proto.Message): + r"""Get Glossary Request + + Attributes: + name (str): + Required. The name of the Glossary to retrieve. Format: + projects/{project_id_or_number}/locations/{location_id}/glossaries/{glossary_id} + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class ListGlossariesRequest(proto.Message): + r"""List Glossaries Request + + Attributes: + parent (str): + Required. The parent, which has this collection of + Glossaries. Format: + projects/{project_id_or_number}/locations/{location_id} + where ``location_id`` refers to a Google Cloud region. + page_size (int): + Optional. The maximum number of Glossaries to + return.
The service may return fewer than this + value. If unspecified, at most 50 Glossaries + will be returned. The maximum value is 1000; + values above 1000 will be coerced to 1000. + page_token (str): + Optional. A page token, received from a previous + ``ListGlossaries`` call. Provide this to retrieve the + subsequent page. When paginating, all other parameters + provided to ``ListGlossaries`` must match the call that + provided the page token. + filter (str): + Optional. Filter expression that filters Glossaries listed + in the response. Filters on proto fields of Glossary are + supported. Examples of using a filter are: + + - ``display_name="my-glossary"`` + - ``categoryCount=1`` + - ``termCount=0`` + order_by (str): + Optional. Order by expression that orders Glossaries listed + in the response. Order by fields are: ``name`` or + ``create_time`` for the result. If not specified, the + ordering is undefined. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + page_size: int = proto.Field( + proto.INT32, + number=2, + ) + page_token: str = proto.Field( + proto.STRING, + number=3, + ) + filter: str = proto.Field( + proto.STRING, + number=4, + ) + order_by: str = proto.Field( + proto.STRING, + number=5, + ) + + +class ListGlossariesResponse(proto.Message): + r"""List Glossaries Response + + Attributes: + glossaries (MutableSequence[google.cloud.dataplex_v1.types.Glossary]): + Lists the Glossaries in the specified parent. + next_page_token (str): + A token, which can be sent as ``page_token`` to retrieve the + next page. If this field is omitted, there are no subsequent + pages. + unreachable_locations (MutableSequence[str]): + Locations that the service couldn't reach. 
+ """ + + @property + def raw_page(self): + return self + + glossaries: MutableSequence["Glossary"] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message="Glossary", + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + unreachable_locations: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=3, + ) + + +class CreateGlossaryCategoryRequest(proto.Message): + r"""Creates a new GlossaryCategory under the specified Glossary. + + Attributes: + parent (str): + Required. The parent resource where this GlossaryCategory + will be created. Format: + projects/{project_id_or_number}/locations/{location_id}/glossaries/{glossary_id} + where ``locationId`` refers to a Google Cloud region. + category_id (str): + Required. GlossaryCategory identifier. + category (google.cloud.dataplex_v1.types.GlossaryCategory): + Required. The GlossaryCategory to create. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + category_id: str = proto.Field( + proto.STRING, + number=2, + ) + category: "GlossaryCategory" = proto.Field( + proto.MESSAGE, + number=3, + message="GlossaryCategory", + ) + + +class UpdateGlossaryCategoryRequest(proto.Message): + r"""Update GlossaryCategory Request + + Attributes: + category (google.cloud.dataplex_v1.types.GlossaryCategory): + Required. The GlossaryCategory to update. The + GlossaryCategory's ``name`` field is used to identify the + GlossaryCategory to update. Format: + projects/{project_id_or_number}/locations/{location_id}/glossaries/{glossary_id}/categories/{category_id} + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Required. The list of fields to update. 
+ """ + + category: "GlossaryCategory" = proto.Field( + proto.MESSAGE, + number=1, + message="GlossaryCategory", + ) + update_mask: field_mask_pb2.FieldMask = proto.Field( + proto.MESSAGE, + number=2, + message=field_mask_pb2.FieldMask, + ) + + +class DeleteGlossaryCategoryRequest(proto.Message): + r"""Delete GlossaryCategory Request + + Attributes: + name (str): + Required. The name of the GlossaryCategory to delete. + Format: + projects/{project_id_or_number}/locations/{location_id}/glossaries/{glossary_id}/categories/{category_id} + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class GetGlossaryCategoryRequest(proto.Message): + r"""Get GlossaryCategory Request + + Attributes: + name (str): + Required. The name of the GlossaryCategory to retrieve. + Format: + projects/{project_id_or_number}/locations/{location_id}/glossaries/{glossary_id}/categories/{category_id} + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class ListGlossaryCategoriesRequest(proto.Message): + r"""List GlossaryCategories Request + + Attributes: + parent (str): + Required. The parent, which has this collection of + GlossaryCategories. Format: + projects/{project_id_or_number}/locations/{location_id}/glossaries/{glossary_id} + Location is the Google Cloud region. + page_size (int): + Optional. The maximum number of + GlossaryCategories to return. The service may + return fewer than this value. If unspecified, at + most 50 GlossaryCategories will be returned. The + maximum value is 1000; values above 1000 will be + coerced to 1000. + page_token (str): + Optional. A page token, received from a previous + ``ListGlossaryCategories`` call. Provide this to retrieve + the subsequent page. When paginating, all other parameters + provided to ``ListGlossaryCategories`` must match the call + that provided the page token. + filter (str): + Optional. Filter expression that filters GlossaryCategories + listed in the response. 
Filters are supported on the + following fields: + + - immediate_parent + + Examples of using a filter are: + + - ``immediate_parent="projects/{project_id_or_number}/locations/{location_id}/glossaries/{glossary_id}"`` + - ``immediate_parent="projects/{project_id_or_number}/locations/{location_id}/glossaries/{glossary_id}/categories/{category_id}"`` + + This will only return the GlossaryCategories that are + directly nested under the specified parent. + order_by (str): + Optional. Order by expression that orders GlossaryCategories + listed in the response. Order by fields are: ``name`` or + ``create_time`` for the result. If not specified, the + ordering is undefined. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + page_size: int = proto.Field( + proto.INT32, + number=2, + ) + page_token: str = proto.Field( + proto.STRING, + number=3, + ) + filter: str = proto.Field( + proto.STRING, + number=4, + ) + order_by: str = proto.Field( + proto.STRING, + number=5, + ) + + +class ListGlossaryCategoriesResponse(proto.Message): + r"""List GlossaryCategories Response + + Attributes: + categories (MutableSequence[google.cloud.dataplex_v1.types.GlossaryCategory]): + Lists the GlossaryCategories in the specified + parent. + next_page_token (str): + A token, which can be sent as ``page_token`` to retrieve the + next page. If this field is omitted, there are no subsequent + pages. + unreachable_locations (MutableSequence[str]): + Locations that the service couldn't reach. + """ + + @property + def raw_page(self): + return self + + categories: MutableSequence["GlossaryCategory"] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message="GlossaryCategory", + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + unreachable_locations: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=3, + ) + + +class CreateGlossaryTermRequest(proto.Message): + r"""Creates a new GlossaryTerm under the specified Glossary. 
+ + Attributes: + parent (str): + Required. The parent resource where the GlossaryTerm will be + created. Format: + projects/{project_id_or_number}/locations/{location_id}/glossaries/{glossary_id} + where ``location_id`` refers to a Google Cloud region. + term_id (str): + Required. GlossaryTerm identifier. + term (google.cloud.dataplex_v1.types.GlossaryTerm): + Required. The GlossaryTerm to create. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + term_id: str = proto.Field( + proto.STRING, + number=2, + ) + term: "GlossaryTerm" = proto.Field( + proto.MESSAGE, + number=3, + message="GlossaryTerm", + ) + + +class UpdateGlossaryTermRequest(proto.Message): + r"""Update GlossaryTerm Request + + Attributes: + term (google.cloud.dataplex_v1.types.GlossaryTerm): + Required. The GlossaryTerm to update. The GlossaryTerm's + ``name`` field is used to identify the GlossaryTerm to + update. Format: + projects/{project_id_or_number}/locations/{location_id}/glossaries/{glossary_id}/terms/{term_id} + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Required. The list of fields to update. + """ + + term: "GlossaryTerm" = proto.Field( + proto.MESSAGE, + number=1, + message="GlossaryTerm", + ) + update_mask: field_mask_pb2.FieldMask = proto.Field( + proto.MESSAGE, + number=2, + message=field_mask_pb2.FieldMask, + ) + + +class DeleteGlossaryTermRequest(proto.Message): + r"""Delete GlossaryTerm Request + + Attributes: + name (str): + Required. The name of the GlossaryTerm to delete. Format: + projects/{project_id_or_number}/locations/{location_id}/glossaries/{glossary_id}/terms/{term_id} + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class GetGlossaryTermRequest(proto.Message): + r"""Get GlossaryTerm Request + + Attributes: + name (str): + Required. The name of the GlossaryTerm to retrieve. 
Format: + projects/{project_id_or_number}/locations/{location_id}/glossaries/{glossary_id}/terms/{term_id} + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class ListGlossaryTermsRequest(proto.Message): + r"""List GlossaryTerms Request + + Attributes: + parent (str): + Required. The parent, which has this collection of + GlossaryTerms. Format: + projects/{project_id_or_number}/locations/{location_id}/glossaries/{glossary_id} + where ``location_id`` refers to a Google Cloud region. + page_size (int): + Optional. The maximum number of GlossaryTerms + to return. The service may return fewer than + this value. If unspecified, at most 50 + GlossaryTerms will be returned. The maximum + value is 1000; values above 1000 will be coerced + to 1000. + page_token (str): + Optional. A page token, received from a previous + ``ListGlossaryTerms`` call. Provide this to retrieve the + subsequent page. When paginating, all other parameters + provided to ``ListGlossaryTerms`` must match the call that + provided the page token. + filter (str): + Optional. Filter expression that filters GlossaryTerms + listed in the response. Filters are supported on the + following fields: + + - immediate_parent + + Examples of using a filter are: + + - ``immediate_parent="projects/{project_id_or_number}/locations/{location_id}/glossaries/{glossary_id}"`` + - ``immediate_parent="projects/{project_id_or_number}/locations/{location_id}/glossaries/{glossary_id}/categories/{category_id}"`` + + This will only return the GlossaryTerms that are directly + nested under the specified parent. + order_by (str): + Optional. Order by expression that orders GlossaryTerms + listed in the response. Order by fields are: ``name`` or + ``create_time`` for the result. If not specified, the + ordering is undefined. 
+ """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + page_size: int = proto.Field( + proto.INT32, + number=2, + ) + page_token: str = proto.Field( + proto.STRING, + number=3, + ) + filter: str = proto.Field( + proto.STRING, + number=4, + ) + order_by: str = proto.Field( + proto.STRING, + number=5, + ) + + +class ListGlossaryTermsResponse(proto.Message): + r"""List GlossaryTerms Response + + Attributes: + terms (MutableSequence[google.cloud.dataplex_v1.types.GlossaryTerm]): + Lists the GlossaryTerms in the specified + parent. + next_page_token (str): + A token, which can be sent as ``page_token`` to retrieve the + next page. If this field is omitted, there are no subsequent + pages. + unreachable_locations (MutableSequence[str]): + Locations that the service couldn't reach. + """ + + @property + def raw_page(self): + return self + + terms: MutableSequence["GlossaryTerm"] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message="GlossaryTerm", + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + unreachable_locations: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=3, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/types/catalog.py b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/types/catalog.py index e028f5e64efe..ee7e879c9bfa 100644 --- a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/types/catalog.py +++ b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/types/catalog.py @@ -69,6 +69,10 @@ "ListMetadataJobsResponse", "CancelMetadataJobRequest", "MetadataJob", + "EntryLink", + "CreateEntryLinkRequest", + "DeleteEntryLinkRequest", + "GetEntryLinkRequest", }, ) @@ -169,14 +173,15 @@ class AspectType(proto.Message): """ class Authorization(proto.Message): - r"""Autorization for an AspectType. + r"""Authorization for an AspectType. Attributes: alternate_use_permission (str): Immutable. 
The IAM permission grantable on the EntryGroup to allow access to instantiate - Aspects of Dataplex owned AspectTypes, only - settable for Dataplex owned Types. + Aspects of Dataplex Universal Catalog owned + AspectTypes, only settable for Dataplex + Universal Catalog owned Types. """ alternate_use_permission: str = proto.Field( @@ -208,8 +213,8 @@ class MetadataTemplate(proto.Message): Primitive types: - string - - integer - - boolean + - int + - bool - double - datetime. Must be of the format RFC3339 UTC "Zulu" (Examples: "2014-10-02T15:01:23Z" and @@ -615,8 +620,9 @@ class Authorization(proto.Message): alternate_use_permission (str): Immutable. The IAM permission grantable on the Entry Group to allow access to instantiate - Entries of Dataplex owned Entry Types, only - settable for Dataplex owned Types. + Entries of Dataplex Universal Catalog owned + Entry Types, only settable for Dataplex + Universal Catalog owned Types. """ alternate_use_permission: str = proto.Field( @@ -787,10 +793,10 @@ class Entry(proto.Message): ``projects/{project_id_or_number}/locations/{location_id}/entryTypes/{entry_type_id}``. create_time (google.protobuf.timestamp_pb2.Timestamp): Output only. The time when the entry was - created in Dataplex. + created in Dataplex Universal Catalog. update_time (google.protobuf.timestamp_pb2.Timestamp): Output only. The time when the entry was last - updated in Dataplex. + updated in Dataplex Universal Catalog. aspects (MutableMapping[str, google.cloud.dataplex_v1.types.Aspect]): Optional. The aspects that are attached to the entry. Depending on how the aspect is attached to the entry, the @@ -974,7 +980,7 @@ class CreateEntryGroupRequest(proto.Message): parent (str): Required. The resource name of the entryGroup, of the form: projects/{project_number}/locations/{location_id} where - ``location_id`` refers to a GCP region. + ``location_id`` refers to a Google Cloud region. entry_group_id (str): Required. EntryGroup identifier. 
entry_group (google.cloud.dataplex_v1.types.EntryGroup): @@ -1223,7 +1229,7 @@ class UpdateEntryTypeRequest(proto.Message): class DeleteEntryTypeRequest(proto.Message): - r"""Delele EntryType Request. + r"""Delete EntryType Request. Attributes: name (str): @@ -1419,7 +1425,7 @@ class UpdateAspectTypeRequest(proto.Message): class DeleteAspectTypeRequest(proto.Message): - r"""Delele AspectType Request. + r"""Delete AspectType Request. Attributes: name (str): @@ -1859,11 +1865,11 @@ class SearchEntriesRequest(proto.Message): name (str): Required. The project to which the request should be attributed in the following form: - ``projects/{project}/locations/{location}``. + ``projects/{project}/locations/global``. query (str): Required. The query against which entries in scope should be matched. The query syntax is defined in `Search syntax for - Dataplex + Dataplex Universal Catalog `__. page_size (int): Optional. Number of results in the search page. If <=0, then @@ -1877,7 +1883,7 @@ class SearchEntriesRequest(proto.Message): Optional. Specifies the ordering of results. Supported values are: - - ``relevance`` (default) + - ``relevance`` - ``last_modified_timestamp`` - ``last_modified_timestamp asc`` scope (str): @@ -2023,17 +2029,22 @@ class ImportItem(proto.Message): entry (google.cloud.dataplex_v1.types.Entry): Information about an entry and its attached aspects. + entry_link (google.cloud.dataplex_v1.types.EntryLink): + Information about the entry link. User should provide either + one of the entry or entry_link. While providing entry_link, + user should not provide update_mask and aspect_keys. update_mask (google.protobuf.field_mask_pb2.FieldMask): The fields to update, in paths that are relative to the ``Entry`` resource. Separate each field with a comma. - In ``FULL`` entry sync mode, Dataplex includes the paths of - all of the fields for an entry that can be modified, - including aspects. 
This means that Dataplex replaces the - existing entry with the entry in the metadata import file. - All modifiable fields are updated, regardless of the fields - that are listed in the update mask, and regardless of - whether a field is present in the ``entry`` object. + In ``FULL`` entry sync mode, Dataplex Universal Catalog + includes the paths of all of the fields for an entry that + can be modified, including aspects. This means that Dataplex + Universal Catalog replaces the existing entry with the entry + in the metadata import file. All modifiable fields are + updated, regardless of the fields that are listed in the + update mask, and regardless of whether a field is present in + the ``entry`` object. The ``update_mask`` field is ignored when an entry is created or re-created. @@ -2041,10 +2052,11 @@ class ImportItem(proto.Message): In an aspect-only metadata job (when entry sync mode is ``NONE``), set this value to ``aspects``. - Dataplex also determines which entries and aspects to modify - by comparing the values and timestamps that you provide in - the metadata import file with the values and timestamps that - exist in your project. For more information, see `Comparison + Dataplex Universal Catalog also determines which entries and + aspects to modify by comparing the values and timestamps + that you provide in the metadata import file with the values + and timestamps that exist in your project. For more + information, see `Comparison logic `__. aspect_keys (MutableSequence[str]): The aspects to modify. Supports the following syntaxes: @@ -2065,8 +2077,9 @@ class ImportItem(proto.Message): In ``FULL`` entry sync mode, if you leave this field empty, it is treated as specifying exactly those aspects that are - present within the specified entry. Dataplex implicitly adds - the keys for all of the required aspects of an entry. + present within the specified entry. 
Dataplex Universal + Catalog implicitly adds the keys for all of the required + aspects of an entry. """ entry: "Entry" = proto.Field( @@ -2074,6 +2087,11 @@ class ImportItem(proto.Message): number=1, message="Entry", ) + entry_link: "EntryLink" = proto.Field( + proto.MESSAGE, + number=4, + message="EntryLink", + ) update_mask: field_mask_pb2.FieldMask = proto.Field( proto.MESSAGE, number=2, @@ -2332,6 +2350,15 @@ class ImportJobResult(proto.Message): update_time (google.protobuf.timestamp_pb2.Timestamp): Output only. The time when the status was updated. + deleted_entry_links (int): + Output only. The total number of entry links + that were successfully deleted. + created_entry_links (int): + Output only. The total number of entry links + that were successfully created. + unchanged_entry_links (int): + Output only. The total number of entry links + that were left unchanged. """ deleted_entries: int = proto.Field( @@ -2359,6 +2386,18 @@ class ImportJobResult(proto.Message): number=5, message=timestamp_pb2.Timestamp, ) + deleted_entry_links: int = proto.Field( + proto.INT64, + number=7, + ) + created_entry_links: int = proto.Field( + proto.INT64, + number=8, + ) + unchanged_entry_links: int = proto.Field( + proto.INT64, + number=9, + ) class ExportJobResult(proto.Message): r"""Summary results from a metadata export job. The results are a @@ -2451,11 +2490,12 @@ class SyncMode(proto.Enum): Sync mode unspecified. FULL (1): All resources in the job's scope are - modified. If a resource exists in Dataplex but - isn't included in the metadata import file, the - resource is deleted when you run the metadata - job. Use this mode to perform a full sync of the - set of entries in the job scope. + modified. If a resource exists in Dataplex + Universal Catalog but isn't included in the + metadata import file, the resource is deleted + when you run the metadata job. Use this mode to + perform a full sync of the set of entries in the + job scope. 
This sync mode is supported for entries. INCREMENTAL (2): @@ -2550,6 +2590,41 @@ class ImportJobScope(proto.Message): The location of an aspect type must either match the location of the job, or the aspect type must be global. + glossaries (MutableSequence[str]): + Optional. The glossaries that are in scope for the import + job, specified as relative resource names in the format + ``projects/{project_number_or_id}/locations/{location_id}/glossaries/{glossary_id}``. + + While importing Business Glossary entries, the user must + provide glossaries. While importing entries, the user does + not have to provide glossaries. If the metadata import file + attempts to modify Business Glossary entries whose glossary + isn't included in this list, the import job will skip those + entries. + + The location of a glossary must either match the location of + the job, or the glossary must be global. + entry_link_types (MutableSequence[str]): + Optional. The entry link types that are in scope for the + import job, specified as relative resource names in the + format + ``projects/{project_number_or_id}/locations/{location_id}/entryLinkTypes/{entry_link_type_id}``. + The job modifies only the entryLinks that belong to these + entry link types. + + If the metadata import file attempts to create or delete an + entry link whose entry link type isn't included in this + list, the import job will skip those entry links. + referenced_entry_scopes (MutableSequence[str]): + Optional. Defines the scope of entries that can be + referenced in the entry links. + + Currently, projects are supported as valid scopes. Format: + ``projects/{project_number_or_id}`` + + If the metadata import file attempts to create an entry link + which references an entry that is not in the scope, the + import job will skip that entry link. 
""" entry_groups: MutableSequence[str] = proto.RepeatedField( @@ -2564,6 +2639,18 @@ class ImportJobScope(proto.Message): proto.STRING, number=3, ) + glossaries: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=4, + ) + entry_link_types: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=5, + ) + referenced_entry_scopes: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=6, + ) source_storage_uri: str = proto.Field( proto.STRING, @@ -2607,9 +2694,9 @@ class ExportJobSpec(proto.Message): You can optionally specify a custom prefix after the bucket name, in the format ``gs://{bucket}/{prefix}/``. The maximum length of the custom prefix is 128 characters. Dataplex - constructs the object path for the exported files by using - the bucket name and prefix that you provide, followed by a - system-generated path. + Universal Catalog constructs the object path for the + exported files by using the bucket name and prefix that you + provide, followed by a system-generated path. The bucket must be in the same VPC Service Controls perimeter as the job. @@ -2825,4 +2912,174 @@ class State(proto.Enum): ) +class EntryLink(proto.Message): + r"""EntryLink represents a link between two Entries. + + Attributes: + name (str): + Output only. Immutable. Identifier. The relative resource + name of the Entry Link, of the form: + ``projects/{project_id_or_number}/locations/{location_id}/entryGroups/{entry_group_id}/entryLinks/{entry_link_id}`` + entry_link_type (str): + Required. Immutable. Relative resource name of the Entry + Link Type used to create this Entry Link. 
For example: + + - Entry link between synonym terms in a glossary: + ``projects/dataplex-types/locations/global/entryLinkTypes/synonym`` + - Entry link between related terms in a glossary: + ``projects/dataplex-types/locations/global/entryLinkTypes/related`` + - Entry link between glossary terms and data assets: + ``projects/dataplex-types/locations/global/entryLinkTypes/definition`` + create_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The time when the Entry Link was + created. + update_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The time when the Entry Link was + last updated. + entry_references (MutableSequence[google.cloud.dataplex_v1.types.EntryLink.EntryReference]): + Required. Specifies the Entries referenced in + the Entry Link. There should be exactly two + entry references. + """ + + class EntryReference(proto.Message): + r"""Reference to the Entry that is linked through the Entry Link. + + Attributes: + name (str): + Required. Immutable. The relative resource name of the + referenced Entry, of the form: + ``projects/{project_id_or_number}/locations/{location_id}/entryGroups/{entry_group_id}/entries/{entry_id}`` + path (str): + Immutable. The path in the Entry that is + referenced in the Entry Link. Empty path denotes + that the Entry itself is referenced in the Entry + Link. + type_ (google.cloud.dataplex_v1.types.EntryLink.EntryReference.Type): + Required. Immutable. The reference type of + the Entry. + """ + + class Type(proto.Enum): + r"""Reference type of the Entry. + + Values: + UNSPECIFIED (0): + Unspecified reference type. Implies that the + Entry is referenced in a non-directional Entry + Link. + SOURCE (2): + The Entry is referenced as the source of the + directional Entry Link. + TARGET (3): + The Entry is referenced as the target of the + directional Entry Link. 
+ """ + UNSPECIFIED = 0 + SOURCE = 2 + TARGET = 3 + + name: str = proto.Field( + proto.STRING, + number=1, + ) + path: str = proto.Field( + proto.STRING, + number=2, + ) + type_: "EntryLink.EntryReference.Type" = proto.Field( + proto.ENUM, + number=3, + enum="EntryLink.EntryReference.Type", + ) + + name: str = proto.Field( + proto.STRING, + number=1, + ) + entry_link_type: str = proto.Field( + proto.STRING, + number=2, + ) + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=3, + message=timestamp_pb2.Timestamp, + ) + update_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=4, + message=timestamp_pb2.Timestamp, + ) + entry_references: MutableSequence[EntryReference] = proto.RepeatedField( + proto.MESSAGE, + number=10, + message=EntryReference, + ) + + +class CreateEntryLinkRequest(proto.Message): + r"""Request message for CreateEntryLink. + + Attributes: + parent (str): + Required. The resource name of the parent Entry Group: + ``projects/{project_id_or_number}/locations/{location_id}/entryGroups/{entry_group_id}``. + entry_link_id (str): + Required. Entry Link identifier + + - Must contain only lowercase letters, numbers and hyphens. + - Must start with a letter. + - Must be between 1-63 characters. + - Must end with a number or a letter. + - Must be unique within the EntryGroup. + entry_link (google.cloud.dataplex_v1.types.EntryLink): + Required. Entry Link resource. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + entry_link_id: str = proto.Field( + proto.STRING, + number=2, + ) + entry_link: "EntryLink" = proto.Field( + proto.MESSAGE, + number=3, + message="EntryLink", + ) + + +class DeleteEntryLinkRequest(proto.Message): + r"""Request message for DeleteEntryLink. + + Attributes: + name (str): + Required. The resource name of the Entry Link: + ``projects/{project_id_or_number}/locations/{location_id}/entryGroups/{entry_group_id}/entryLinks/{entry_link_id}``. 
+ """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class GetEntryLinkRequest(proto.Message): + r"""Request message for GetEntryLink. + + Attributes: + name (str): + Required. The resource name of the Entry Link: + ``projects/{project_id_or_number}/locations/{location_id}/entryGroups/{entry_group_id}/entryLinks/{entry_link_id}``. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + __all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/types/data_discovery.py b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/types/data_discovery.py index eb5ff0a710ba..0e571059f10d 100644 --- a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/types/data_discovery.py +++ b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/types/data_discovery.py @@ -81,6 +81,11 @@ class BigQueryPublishingConfig(proto.Message): For supported values, refer to https://cloud.google.com/bigquery/docs/locations#supported_locations. + project (str): + Optional. The project of the BigQuery dataset to publish + BigLake external or non-BigLake external tables to. If not + specified, the project of the Cloud Storage bucket will be + used. The format is "projects/{project_id_or_number}". """ class TableType(proto.Enum): @@ -119,6 +124,10 @@ class TableType(proto.Enum): proto.STRING, number=4, ) + project: str = proto.Field( + proto.STRING, + number=5, + ) class StorageConfig(proto.Message): r"""Configurations related to Cloud Storage as the data source. diff --git a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/types/data_profile.py b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/types/data_profile.py index e4d3680a5ee8..9c254e7f8f9c 100644 --- a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/types/data_profile.py +++ b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/types/data_profile.py @@ -45,11 +45,10 @@ class DataProfileSpec(proto.Message): 100. 
row_filter (str): - Optional. A filter applied to all rows in a single DataScan - job. The filter needs to be a valid SQL expression for a - `WHERE clause in GoogleSQL - syntax `__. - + Optional. A filter applied to all rows in a + single DataScan job. The filter needs to be a + valid SQL expression for a WHERE clause in + BigQuery standard SQL syntax. Example: col1 >= 0 AND col2 < 10 post_scan_actions (google.cloud.dataplex_v1.types.DataProfileSpec.PostScanActions): Optional. Actions to take upon job @@ -85,7 +84,6 @@ class BigQueryExport(proto.Message): Optional. The BigQuery table to export DataProfileScan results to. Format: //bigquery.googleapis.com/projects/PROJECT_ID/datasets/DATASET_ID/tables/TABLE_ID - or projects/PROJECT_ID/datasets/DATASET_ID/tables/TABLE_ID """ results_table: str = proto.Field( @@ -151,11 +149,13 @@ class DataProfileResult(proto.Message): Attributes: row_count (int): - The count of rows scanned. + Output only. The count of rows scanned. profile (google.cloud.dataplex_v1.types.DataProfileResult.Profile): - The profile information per field. + Output only. The profile information per + field. scanned_data (google.cloud.dataplex_v1.types.ScannedData): - The data scanned for this result. + Output only. The data scanned for this + result. post_scan_actions_result (google.cloud.dataplex_v1.types.DataProfileResult.PostScanActionsResult): Output only. The result of post scan actions. """ @@ -166,8 +166,8 @@ class Profile(proto.Message): Attributes: fields (MutableSequence[google.cloud.dataplex_v1.types.DataProfileResult.Profile.Field]): - List of fields with structural and profile - information for each field. + Output only. List of fields with structural + and profile information for each field. """ class Field(proto.Message): @@ -175,23 +175,23 @@ class Field(proto.Message): Attributes: name (str): - The name of the field. + Output only. The name of the field. type_ (str): - The data type retrieved from the schema of the data source. 
- For instance, for a BigQuery native table, it is the - `BigQuery Table + Output only. The data type retrieved from the schema of the + data source. For instance, for a BigQuery native table, it + is the `BigQuery Table Schema `__. - For a Dataplex Entity, it is the `Entity + For a Dataplex Universal Catalog Entity, it is the `Entity Schema `__. mode (str): - The mode of the field. Possible values include: + Output only. The mode of the field. Possible values include: - REQUIRED, if it is a required field. - NULLABLE, if it is an optional field. - REPEATED, if it is a repeated field. profile (google.cloud.dataplex_v1.types.DataProfileResult.Profile.Field.ProfileInfo): - Profile information for the corresponding - field. + Output only. Profile information for the + corresponding field. """ class ProfileInfo(proto.Message): @@ -206,22 +206,23 @@ class ProfileInfo(proto.Message): Attributes: null_ratio (float): - Ratio of rows with null value against total - scanned rows. + Output only. Ratio of rows with null value + against total scanned rows. distinct_ratio (float): - Ratio of rows with distinct values against - total scanned rows. Not available for complex + Output only. Ratio of rows with distinct + values against total scanned rows. Not available + for complex non-groupable field type, including + RECORD, ARRAY, GEOGRAPHY, and JSON, as well as + fields with REPEATABLE mode. + top_n_values (MutableSequence[google.cloud.dataplex_v1.types.DataProfileResult.Profile.Field.ProfileInfo.TopNValue]): + Output only. The list of top N non-null + values, frequency and ratio with which they + occur in the scanned data. N is 10 or equal to + the number of distinct values in the field, + whichever is smaller. Not available for complex non-groupable field type, including RECORD, ARRAY, GEOGRAPHY, and JSON, as well as fields with REPEATABLE mode. 
- top_n_values (MutableSequence[google.cloud.dataplex_v1.types.DataProfileResult.Profile.Field.ProfileInfo.TopNValue]): - The list of top N non-null values, frequency - and ratio with which they occur in the scanned - data. N is 10 or equal to the number of distinct - values in the field, whichever is smaller. Not - available for complex non-groupable field type, - including RECORD, ARRAY, GEOGRAPHY, and JSON, as - well as fields with REPEATABLE mode. string_profile (google.cloud.dataplex_v1.types.DataProfileResult.Profile.Field.ProfileInfo.StringFieldInfo): String type field information. @@ -241,14 +242,14 @@ class StringFieldInfo(proto.Message): Attributes: min_length (int): - Minimum length of non-null values in the - scanned data. + Output only. Minimum length of non-null + values in the scanned data. max_length (int): - Maximum length of non-null values in the - scanned data. + Output only. Maximum length of non-null + values in the scanned data. average_length (float): - Average length of non-null values in the - scanned data. + Output only. Average length of non-null + values in the scanned data. """ min_length: int = proto.Field( @@ -269,34 +270,35 @@ class IntegerFieldInfo(proto.Message): Attributes: average (float): - Average of non-null values in the scanned - data. NaN, if the field has a NaN. + Output only. Average of non-null values in + the scanned data. NaN, if the field has a NaN. standard_deviation (float): - Standard deviation of non-null values in the - scanned data. NaN, if the field has a NaN. + Output only. Standard deviation of non-null + values in the scanned data. NaN, if the field + has a NaN. min_ (int): - Minimum of non-null values in the scanned - data. NaN, if the field has a NaN. + Output only. Minimum of non-null values in + the scanned data. NaN, if the field has a NaN. quartiles (MutableSequence[int]): - A quartile divides the number of data points - into four parts, or quarters, of more-or-less - equal size. 
Three main quartiles used are: The - first quartile (Q1) splits off the lowest 25% of - data from the highest 75%. It is also known as - the lower or 25th empirical quartile, as 25% of - the data is below this point. The second - quartile (Q2) is the median of a data set. So, - 50% of the data lies below this point. The third - quartile (Q3) splits off the highest 25% of data - from the lowest 75%. It is known as the upper or - 75th empirical quartile, as 75% of the data lies - below this point. Here, the quartiles is - provided as an ordered list of approximate - quartile values for the scanned data, occurring - in order Q1, median, Q3. + Output only. A quartile divides the number of + data points into four parts, or quarters, of + more-or-less equal size. Three main quartiles + used are: The first quartile (Q1) splits off the + lowest 25% of data from the highest 75%. It is + also known as the lower or 25th empirical + quartile, as 25% of the data is below this + point. The second quartile (Q2) is the median of + a data set. So, 50% of the data lies below this + point. The third quartile (Q3) splits off the + highest 25% of data from the lowest 75%. It is + known as the upper or 75th empirical quartile, + as 75% of the data lies below this point. Here, + the quartiles is provided as an ordered list of + approximate quartile values for the scanned + data, occurring in order Q1, median, Q3. max_ (int): - Maximum of non-null values in the scanned - data. NaN, if the field has a NaN. + Output only. Maximum of non-null values in + the scanned data. NaN, if the field has a NaN. """ average: float = proto.Field( @@ -325,34 +327,35 @@ class DoubleFieldInfo(proto.Message): Attributes: average (float): - Average of non-null values in the scanned - data. NaN, if the field has a NaN. + Output only. Average of non-null values in + the scanned data. NaN, if the field has a NaN. standard_deviation (float): - Standard deviation of non-null values in the - scanned data. 
NaN, if the field has a NaN. + Output only. Standard deviation of non-null + values in the scanned data. NaN, if the field + has a NaN. min_ (float): - Minimum of non-null values in the scanned - data. NaN, if the field has a NaN. + Output only. Minimum of non-null values in + the scanned data. NaN, if the field has a NaN. quartiles (MutableSequence[float]): - A quartile divides the number of data points - into four parts, or quarters, of more-or-less - equal size. Three main quartiles used are: The - first quartile (Q1) splits off the lowest 25% of - data from the highest 75%. It is also known as - the lower or 25th empirical quartile, as 25% of - the data is below this point. The second - quartile (Q2) is the median of a data set. So, - 50% of the data lies below this point. The third - quartile (Q3) splits off the highest 25% of data - from the lowest 75%. It is known as the upper or - 75th empirical quartile, as 75% of the data lies - below this point. Here, the quartiles is - provided as an ordered list of quartile values - for the scanned data, occurring in order Q1, - median, Q3. + Output only. A quartile divides the number of + data points into four parts, or quarters, of + more-or-less equal size. Three main quartiles + used are: The first quartile (Q1) splits off the + lowest 25% of data from the highest 75%. It is + also known as the lower or 25th empirical + quartile, as 25% of the data is below this + point. The second quartile (Q2) is the median of + a data set. So, 50% of the data lies below this + point. The third quartile (Q3) splits off the + highest 25% of data from the lowest 75%. It is + known as the upper or 75th empirical quartile, + as 75% of the data lies below this point. Here, + the quartiles is provided as an ordered list of + quartile values for the scanned data, occurring + in order Q1, median, Q3. max_ (float): - Maximum of non-null values in the scanned - data. NaN, if the field has a NaN. + Output only. 
Maximum of non-null values in + the scanned data. NaN, if the field has a NaN. """ average: float = proto.Field( @@ -381,14 +384,15 @@ class TopNValue(proto.Message): Attributes: value (str): - String value of a top N non-null value. + Output only. String value of a top N non-null + value. count (int): - Count of the corresponding value in the - scanned data. + Output only. Count of the corresponding value + in the scanned data. ratio (float): - Ratio of the corresponding value in the field - against the total number of rows in the scanned - data. + Output only. Ratio of the corresponding value + in the field against the total number of rows in + the scanned data. """ value: str = proto.Field( diff --git a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/types/data_quality.py b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/types/data_quality.py index 7bd150fde2ce..23dc6dea5c1f 100644 --- a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/types/data_quality.py +++ b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/types/data_quality.py @@ -19,7 +19,7 @@ import proto # type: ignore -from google.cloud.dataplex_v1.types import processing +from google.cloud.dataplex_v1.types import datascans_common, processing __protobuf__ = proto.module( package="google.cloud.dataplex.v1", @@ -63,6 +63,10 @@ class DataQualitySpec(proto.Message): post_scan_actions (google.cloud.dataplex_v1.types.DataQualitySpec.PostScanActions): Optional. Actions to take upon job completion. + catalog_publishing_enabled (bool): + Optional. If set, the latest DataScan job + result will be published as Dataplex Universal + Catalog metadata. """ class PostScanActions(proto.Message): @@ -208,6 +212,10 @@ class NotificationReport(proto.Message): number=6, message=PostScanActions, ) + catalog_publishing_enabled: bool = proto.Field( + proto.BOOL, + number=8, + ) class DataQualityResult(proto.Message): @@ -248,6 +256,10 @@ class DataQualityResult(proto.Message): result. 
post_scan_actions_result (google.cloud.dataplex_v1.types.DataQualityResult.PostScanActionsResult): Output only. The result of post scan actions. + catalog_publishing_status (google.cloud.dataplex_v1.types.DataScanCatalogPublishingStatus): + Output only. The status of publishing the + data scan as Dataplex Universal Catalog + metadata. """ class PostScanActionsResult(proto.Message): @@ -346,6 +358,13 @@ class State(proto.Enum): number=8, message=PostScanActionsResult, ) + catalog_publishing_status: datascans_common.DataScanCatalogPublishingStatus = ( + proto.Field( + proto.MESSAGE, + number=11, + message=datascans_common.DataScanCatalogPublishingStatus, + ) + ) class DataQualityRuleResult(proto.Message): @@ -482,10 +501,10 @@ class DataQualityDimension(proto.Message): Attributes: name (str): - Optional. The dimension name a rule belongs - to. Custom dimension name is supported with all - uppercase letters and maximum length of 30 - characters. + Output only. The dimension name a rule + belongs to. Custom dimension name is supported + with all uppercase letters and maximum length of + 30 characters. """ name: str = proto.Field( @@ -567,10 +586,11 @@ class DataQualityRule(proto.Message): - SetExpectation - UniquenessExpectation dimension (str): - Required. The dimension a rule belongs to. Results are also - aggregated at the dimension level. Supported dimensions are - **["COMPLETENESS", "ACCURACY", "CONSISTENCY", "VALIDITY", - "UNIQUENESS", "FRESHNESS", "VOLUME"]** + Required. The dimension a rule belongs to. + Results are also aggregated at the dimension + level. Custom dimension name is supported with + all uppercase letters and maximum length of 30 + characters. threshold (float): Optional. The minimum ratio of **passing_rows / total_rows** required to pass this rule, with a range of [0.0, 1.0]. @@ -919,6 +939,12 @@ class DataQualityColumnResult(proto.Message): points). This field is a member of `oneof`_ ``_score``. + passed (bool): + Output only. 
Whether the column passed or + failed. + dimensions (MutableSequence[google.cloud.dataplex_v1.types.DataQualityDimensionResult]): + Output only. The dimension-level results for + this column. """ column: str = proto.Field( @@ -930,6 +956,15 @@ class DataQualityColumnResult(proto.Message): number=2, optional=True, ) + passed: bool = proto.Field( + proto.BOOL, + number=3, + ) + dimensions: MutableSequence["DataQualityDimensionResult"] = proto.RepeatedField( + proto.MESSAGE, + number=4, + message="DataQualityDimensionResult", + ) __all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/types/data_taxonomy.py b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/types/data_taxonomy.py index cb4e92cf412d..3e8e104b6a3a 100644 --- a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/types/data_taxonomy.py +++ b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/types/data_taxonomy.py @@ -479,7 +479,7 @@ class ListDataTaxonomiesRequest(proto.Message): parent (str): Required. The resource name of the DataTaxonomy location, of the form: projects/{project_number}/locations/{location_id} - where ``location_id`` refers to a GCP region. + where ``location_id`` refers to a Google Cloud region. page_size (int): Optional. Maximum number of DataTaxonomies to return. The service may return fewer than this diff --git a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/types/datascans.py b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/types/datascans.py index e9f3df8f1d40..aa370fd2abb8 100644 --- a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/types/datascans.py +++ b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/types/datascans.py @@ -79,7 +79,7 @@ class CreateDataScanRequest(proto.Message): Required. 
The resource name of the parent location: ``projects/{project}/locations/{location_id}`` where ``project`` refers to a *project_id* or *project_number* and - ``location_id`` refers to a GCP region. + ``location_id`` refers to a Google Cloud region. data_scan (google.cloud.dataplex_v1.types.DataScan): Required. DataScan resource. data_scan_id (str): @@ -153,7 +153,8 @@ class DeleteDataScanRequest(proto.Message): Required. The resource name of the dataScan: ``projects/{project}/locations/{location_id}/dataScans/{data_scan_id}`` where ``project`` refers to a *project_id* or - *project_number* and ``location_id`` refers to a GCP region. + *project_number* and ``location_id`` refers to a Google + Cloud region. force (bool): Optional. If set to true, any child resources of this data scan will also be deleted. @@ -179,7 +180,8 @@ class GetDataScanRequest(proto.Message): Required. The resource name of the dataScan: ``projects/{project}/locations/{location_id}/dataScans/{data_scan_id}`` where ``project`` refers to a *project_id* or - *project_number* and ``location_id`` refers to a GCP region. + *project_number* and ``location_id`` refers to a Google + Cloud region. view (google.cloud.dataplex_v1.types.GetDataScanRequest.DataScanView): Optional. Select the DataScan view to return. Defaults to ``BASIC``. @@ -219,7 +221,7 @@ class ListDataScansRequest(proto.Message): Required. The resource name of the parent location: ``projects/{project}/locations/{location_id}`` where ``project`` refers to a *project_id* or *project_number* and - ``location_id`` refers to a GCP region. + ``location_id`` refers to a Google Cloud region. page_size (int): Optional. Maximum number of dataScans to return. The service may return fewer than this @@ -303,7 +305,8 @@ class RunDataScanRequest(proto.Message): Required. The resource name of the DataScan: ``projects/{project}/locations/{location_id}/dataScans/{data_scan_id}``. 
where ``project`` refers to a *project_id* or - *project_number* and ``location_id`` refers to a GCP region. + *project_number* and ``location_id`` refers to a Google + Cloud region. Only **OnDemand** data scans are allowed. """ @@ -337,7 +340,8 @@ class GetDataScanJobRequest(proto.Message): Required. The resource name of the DataScanJob: ``projects/{project}/locations/{location_id}/dataScans/{data_scan_id}/jobs/{data_scan_job_id}`` where ``project`` refers to a *project_id* or - *project_number* and ``location_id`` refers to a GCP region. + *project_number* and ``location_id`` refers to a Google + Cloud region. view (google.cloud.dataplex_v1.types.GetDataScanJobRequest.DataScanJobView): Optional. Select the DataScanJob view to return. Defaults to ``BASIC``. @@ -377,7 +381,8 @@ class ListDataScanJobsRequest(proto.Message): Required. The resource name of the parent environment: ``projects/{project}/locations/{location_id}/dataScans/{data_scan_id}`` where ``project`` refers to a *project_id* or - *project_number* and ``location_id`` refers to a GCP region. + *project_number* and ``location_id`` refers to a Google + Cloud region. page_size (int): Optional. Maximum number of DataScanJobs to return. The service may return fewer than this @@ -483,8 +488,8 @@ class GenerateDataQualityRulesResponse(proto.Message): Attributes: rule (MutableSequence[google.cloud.dataplex_v1.types.DataQualityRule]): The data quality rules that Dataplex - generates based on the results of a data - profiling scan. + Universal Catalog generates based on the results + of a data profiling scan. """ rule: MutableSequence[data_quality.DataQualityRule] = proto.RepeatedField( @@ -527,7 +532,8 @@ class DataScan(proto.Message): scan, of the form: ``projects/{project}/locations/{location_id}/dataScans/{datascan_id}``, where ``project`` refers to a *project_id* or - *project_number* and ``location_id`` refers to a GCP region. + *project_number* and ``location_id`` refers to a Google + Cloud region. 
uid (str): Output only. System generated globally unique ID for the scan. This ID will be different if @@ -768,7 +774,8 @@ class DataScanJob(proto.Message): DataScanJob, of the form: ``projects/{project}/locations/{location_id}/dataScans/{datascan_id}/jobs/{job_id}``, where ``project`` refers to a *project_id* or - *project_number* and ``location_id`` refers to a GCP region. + *project_number* and ``location_id`` refers to a Google + Cloud region. uid (str): Output only. System generated globally unique ID for the DataScanJob. diff --git a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/types/datascans_common.py b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/types/datascans_common.py new file mode 100644 index 000000000000..1a8ef46d0a0c --- /dev/null +++ b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/types/datascans_common.py @@ -0,0 +1,62 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +import proto # type: ignore + +__protobuf__ = proto.module( + package="google.cloud.dataplex.v1", + manifest={ + "DataScanCatalogPublishingStatus", + }, +) + + +class DataScanCatalogPublishingStatus(proto.Message): + r"""The status of publishing the data scan result as Dataplex + Universal Catalog metadata. 
+ + Attributes: + state (google.cloud.dataplex_v1.types.DataScanCatalogPublishingStatus.State): + Output only. Execution state for catalog + publishing. + """ + + class State(proto.Enum): + r"""Execution state for the publishing. + + Values: + STATE_UNSPECIFIED (0): + The publishing state is unspecified. + SUCCEEDED (1): + Publish to catalog completed successfully. + FAILED (2): + Publish to catalog failed. + """ + STATE_UNSPECIFIED = 0 + SUCCEEDED = 1 + FAILED = 2 + + state: State = proto.Field( + proto.ENUM, + number=1, + enum=State, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/types/logs.py b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/types/logs.py index 7b1336f4f29e..54f0851abf29 100644 --- a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/types/logs.py +++ b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/types/logs.py @@ -21,6 +21,8 @@ from google.protobuf import timestamp_pb2 # type: ignore import proto # type: ignore +from google.cloud.dataplex_v1.types import datascans_common + __protobuf__ = proto.module( package="google.cloud.dataplex.v1", manifest={ @@ -434,8 +436,9 @@ class ExecutionTrigger(proto.Enum): EXECUTION_TRIGGER_UNSPECIFIED (0): The job execution trigger is unspecified. TASK_CONFIG (1): - The job was triggered by Dataplex based on - trigger spec from task definition. + The job was triggered by Dataplex Universal + Catalog based on trigger spec from task + definition. RUN_REQUEST (2): The job was triggered by the explicit call of Task API. @@ -837,6 +840,9 @@ class DataScanEvent(proto.Message): This field is a member of `oneof`_ ``appliedConfigs``. post_scan_actions_result (google.cloud.dataplex_v1.types.DataScanEvent.PostScanActionsResult): The result of post scan actions. 
+ catalog_publishing_status (google.cloud.dataplex_v1.types.DataScanCatalogPublishingStatus): + The status of publishing the data scan as + Dataplex Universal Catalog metadata. """ class ScanType(proto.Enum): @@ -872,7 +878,7 @@ class State(proto.Enum): CANCELLED (4): Data scan job was cancelled. CREATED (5): - Data scan job was createed. + Data scan job was created. """ STATE_UNSPECIFIED = 0 STARTED = 1 @@ -1180,6 +1186,13 @@ class State(proto.Enum): number=11, message=PostScanActionsResult, ) + catalog_publishing_status: datascans_common.DataScanCatalogPublishingStatus = ( + proto.Field( + proto.MESSAGE, + number=13, + message=datascans_common.DataScanCatalogPublishingStatus, + ) + ) class DataQualityScanRuleResult(proto.Message): diff --git a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/types/metadata_.py b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/types/metadata_.py index 4fc49722de73..bfd47aa4b8b8 100644 --- a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/types/metadata_.py +++ b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/types/metadata_.py @@ -719,20 +719,21 @@ class Schema(proto.Message): Attributes: user_managed (bool): Required. Set to ``true`` if user-managed or ``false`` if - managed by Dataplex. The default is ``false`` (managed by - Dataplex). - - - Set to ``false``\ to enable Dataplex discovery to update - the schema. including new data discovery, schema - inference, and schema evolution. Users retain the ability - to input and edit the schema. Dataplex treats schema input - by the user as though produced by a previous Dataplex + managed by Dataplex Universal Catalog. The default is + ``false`` (managed by Dataplex Universal Catalog). + + - Set to ``false``\ to enable Dataplex Universal Catalog + discovery to update the schema. including new data + discovery, schema inference, and schema evolution. Users + retain the ability to input and edit the schema. 
Dataplex + Universal Catalog treats schema input by the user as + though produced by a previous Dataplex Universal Catalog discovery operation, and it will evolve the schema and take action based on that treatment. - Set to ``true`` to fully manage the entity schema. This - setting guarantees that Dataplex will not change schema - fields. + setting guarantees that Dataplex Universal Catalog will + not change schema fields. fields (MutableSequence[google.cloud.dataplex_v1.types.Schema.SchemaField]): Optional. The sequence of fields describing data in table entities. **Note:** BigQuery SchemaFields are immutable. diff --git a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/types/processing.py b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/types/processing.py index 6eb8897ac0fb..cbf390d581b3 100644 --- a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/types/processing.py +++ b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/types/processing.py @@ -103,15 +103,19 @@ class DataSource(proto.Message): Attributes: entity (str): - Immutable. The Dataplex entity that represents the data - source (e.g. BigQuery table) for DataScan, of the form: + Immutable. The Dataplex Universal Catalog entity that + represents the data source (e.g. BigQuery table) for + DataScan, of the form: ``projects/{project_number}/locations/{location_id}/lakes/{lake_id}/zones/{zone_id}/entities/{entity_id}``. This field is a member of `oneof`_ ``source``. resource (str): Immutable. The service-qualified full resource name of the cloud resource for a DataScan job to scan against. 
The field - could be: BigQuery table of type "TABLE" for + could either be: Cloud Storage bucket for DataDiscoveryScan + Format: + //storage.googleapis.com/projects/PROJECT_ID/buckets/BUCKET_ID + or BigQuery table of type "TABLE" for DataProfileScan/DataQualityScan Format: //bigquery.googleapis.com/projects/PROJECT_ID/datasets/DATASET_ID/tables/TABLE_ID @@ -151,13 +155,15 @@ class IncrementalField(proto.Message): Attributes: field (str): - The field that contains values which - monotonically increases over time (e.g. a + Output only. The field that contains values + which monotonically increases over time (e.g. a timestamp column). start (str): - Value that marks the start of the range. + Output only. Value that marks the start of + the range. end (str): - Value that marks the end of the range. + Output only. Value that marks the end of the + range. """ field: str = proto.Field( diff --git a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/types/service.py b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/types/service.py index 46c4f9c61a7f..009cf0e32bd1 100644 --- a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/types/service.py +++ b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/types/service.py @@ -81,7 +81,7 @@ class CreateLakeRequest(proto.Message): parent (str): Required. The resource name of the lake location, of the form: projects/{project_number}/locations/{location_id} - where ``location_id`` refers to a GCP region. + where ``location_id`` refers to a Google Cloud region. lake_id (str): Required. Lake identifier. This ID will be used to generate names such as database and dataset names when publishing @@ -170,7 +170,7 @@ class ListLakesRequest(proto.Message): parent (str): Required. The resource name of the lake location, of the form: ``projects/{project_number}/locations/{location_id}`` - where ``location_id`` refers to a GCP region. + where ``location_id`` refers to a Google Cloud region. page_size (int): Optional. 
Maximum number of Lakes to return. The service may return fewer than this value. If diff --git a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/types/tasks.py b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/types/tasks.py index a8f313e987ce..7c0d9a249915 100644 --- a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/types/tasks.py +++ b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/types/tasks.py @@ -665,7 +665,8 @@ class State(proto.Enum): FAILED (5): The job is no longer running due to an error. ABORTED (6): - The job was cancelled outside of Dataplex. + The job was cancelled outside of Dataplex + Universal Catalog. """ STATE_UNSPECIFIED = 0 RUNNING = 1 @@ -682,8 +683,9 @@ class Trigger(proto.Enum): TRIGGER_UNSPECIFIED (0): The trigger is unspecified. TASK_CONFIG (1): - The job was triggered by Dataplex based on - trigger spec from task definition. + The job was triggered by Dataplex Universal + Catalog based on trigger spec from task + definition. RUN_REQUEST (2): The job was triggered by the explicit call of Task API. diff --git a/packages/google-cloud-dataplex/samples/generated_samples/dataplex_v1_generated_business_glossary_service_create_glossary_async.py b/packages/google-cloud-dataplex/samples/generated_samples/dataplex_v1_generated_business_glossary_service_create_glossary_async.py new file mode 100644 index 000000000000..c87894b42634 --- /dev/null +++ b/packages/google-cloud-dataplex/samples/generated_samples/dataplex_v1_generated_business_glossary_service_create_glossary_async.py @@ -0,0 +1,57 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateGlossary +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dataplex + + +# [START dataplex_v1_generated_BusinessGlossaryService_CreateGlossary_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dataplex_v1 + + +async def sample_create_glossary(): + # Create a client + client = dataplex_v1.BusinessGlossaryServiceAsyncClient() + + # Initialize request argument(s) + request = dataplex_v1.CreateGlossaryRequest( + parent="parent_value", + glossary_id="glossary_id_value", + ) + + # Make the request + operation = client.create_glossary(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END dataplex_v1_generated_BusinessGlossaryService_CreateGlossary_async] diff --git a/packages/google-cloud-dataplex/samples/generated_samples/dataplex_v1_generated_business_glossary_service_create_glossary_category_async.py b/packages/google-cloud-dataplex/samples/generated_samples/dataplex_v1_generated_business_glossary_service_create_glossary_category_async.py new file mode 100644 index 000000000000..02835539a9b5 --- /dev/null +++ b/packages/google-cloud-dataplex/samples/generated_samples/dataplex_v1_generated_business_glossary_service_create_glossary_category_async.py @@ -0,0 +1,57 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! 
+# +# Snippet for CreateGlossaryCategory +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dataplex + + +# [START dataplex_v1_generated_BusinessGlossaryService_CreateGlossaryCategory_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dataplex_v1 + + +async def sample_create_glossary_category(): + # Create a client + client = dataplex_v1.BusinessGlossaryServiceAsyncClient() + + # Initialize request argument(s) + category = dataplex_v1.GlossaryCategory() + category.parent = "parent_value" + + request = dataplex_v1.CreateGlossaryCategoryRequest( + parent="parent_value", + category_id="category_id_value", + category=category, + ) + + # Make the request + response = await client.create_glossary_category(request=request) + + # Handle the response + print(response) + +# [END dataplex_v1_generated_BusinessGlossaryService_CreateGlossaryCategory_async] diff --git a/packages/google-cloud-dataplex/samples/generated_samples/dataplex_v1_generated_business_glossary_service_create_glossary_category_sync.py b/packages/google-cloud-dataplex/samples/generated_samples/dataplex_v1_generated_business_glossary_service_create_glossary_category_sync.py new file mode 100644 index 000000000000..2a3ed96a2236 --- /dev/null +++ b/packages/google-cloud-dataplex/samples/generated_samples/dataplex_v1_generated_business_glossary_service_create_glossary_category_sync.py @@ -0,0 +1,57 @@ +# -*- coding: utf-8 -*- +# 
Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateGlossaryCategory +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dataplex + + +# [START dataplex_v1_generated_BusinessGlossaryService_CreateGlossaryCategory_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dataplex_v1 + + +def sample_create_glossary_category(): + # Create a client + client = dataplex_v1.BusinessGlossaryServiceClient() + + # Initialize request argument(s) + category = dataplex_v1.GlossaryCategory() + category.parent = "parent_value" + + request = dataplex_v1.CreateGlossaryCategoryRequest( + parent="parent_value", + category_id="category_id_value", + category=category, + ) + + # Make the request + response = client.create_glossary_category(request=request) + + # Handle the response + print(response) + +# [END dataplex_v1_generated_BusinessGlossaryService_CreateGlossaryCategory_sync] diff --git a/packages/google-cloud-dataplex/samples/generated_samples/dataplex_v1_generated_business_glossary_service_create_glossary_sync.py b/packages/google-cloud-dataplex/samples/generated_samples/dataplex_v1_generated_business_glossary_service_create_glossary_sync.py new file mode 100644 index 000000000000..fcbc1d2bdeae --- /dev/null +++ b/packages/google-cloud-dataplex/samples/generated_samples/dataplex_v1_generated_business_glossary_service_create_glossary_sync.py @@ -0,0 +1,57 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! 
+# +# Snippet for CreateGlossary +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dataplex + + +# [START dataplex_v1_generated_BusinessGlossaryService_CreateGlossary_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dataplex_v1 + + +def sample_create_glossary(): + # Create a client + client = dataplex_v1.BusinessGlossaryServiceClient() + + # Initialize request argument(s) + request = dataplex_v1.CreateGlossaryRequest( + parent="parent_value", + glossary_id="glossary_id_value", + ) + + # Make the request + operation = client.create_glossary(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END dataplex_v1_generated_BusinessGlossaryService_CreateGlossary_sync] diff --git a/packages/google-cloud-dataplex/samples/generated_samples/dataplex_v1_generated_business_glossary_service_create_glossary_term_async.py b/packages/google-cloud-dataplex/samples/generated_samples/dataplex_v1_generated_business_glossary_service_create_glossary_term_async.py new file mode 100644 index 000000000000..08851757a5be --- /dev/null +++ b/packages/google-cloud-dataplex/samples/generated_samples/dataplex_v1_generated_business_glossary_service_create_glossary_term_async.py @@ -0,0 +1,57 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you 
may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateGlossaryTerm +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dataplex + + +# [START dataplex_v1_generated_BusinessGlossaryService_CreateGlossaryTerm_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dataplex_v1 + + +async def sample_create_glossary_term(): + # Create a client + client = dataplex_v1.BusinessGlossaryServiceAsyncClient() + + # Initialize request argument(s) + term = dataplex_v1.GlossaryTerm() + term.parent = "parent_value" + + request = dataplex_v1.CreateGlossaryTermRequest( + parent="parent_value", + term_id="term_id_value", + term=term, + ) + + # Make the request + response = await client.create_glossary_term(request=request) + + # Handle the response + print(response) + +# [END dataplex_v1_generated_BusinessGlossaryService_CreateGlossaryTerm_async] diff --git a/packages/google-cloud-dataplex/samples/generated_samples/dataplex_v1_generated_business_glossary_service_create_glossary_term_sync.py b/packages/google-cloud-dataplex/samples/generated_samples/dataplex_v1_generated_business_glossary_service_create_glossary_term_sync.py new file mode 100644 index 000000000000..9987138e7c68 --- /dev/null +++ b/packages/google-cloud-dataplex/samples/generated_samples/dataplex_v1_generated_business_glossary_service_create_glossary_term_sync.py @@ -0,0 +1,57 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! 
+# +# Snippet for CreateGlossaryTerm +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dataplex + + +# [START dataplex_v1_generated_BusinessGlossaryService_CreateGlossaryTerm_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dataplex_v1 + + +def sample_create_glossary_term(): + # Create a client + client = dataplex_v1.BusinessGlossaryServiceClient() + + # Initialize request argument(s) + term = dataplex_v1.GlossaryTerm() + term.parent = "parent_value" + + request = dataplex_v1.CreateGlossaryTermRequest( + parent="parent_value", + term_id="term_id_value", + term=term, + ) + + # Make the request + response = client.create_glossary_term(request=request) + + # Handle the response + print(response) + +# [END dataplex_v1_generated_BusinessGlossaryService_CreateGlossaryTerm_sync] diff --git a/packages/google-cloud-dataplex/samples/generated_samples/dataplex_v1_generated_business_glossary_service_delete_glossary_async.py b/packages/google-cloud-dataplex/samples/generated_samples/dataplex_v1_generated_business_glossary_service_delete_glossary_async.py new file mode 100644 index 000000000000..770ba6a5e13f --- /dev/null +++ b/packages/google-cloud-dataplex/samples/generated_samples/dataplex_v1_generated_business_glossary_service_delete_glossary_async.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you 
may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteGlossary +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dataplex + + +# [START dataplex_v1_generated_BusinessGlossaryService_DeleteGlossary_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dataplex_v1 + + +async def sample_delete_glossary(): + # Create a client + client = dataplex_v1.BusinessGlossaryServiceAsyncClient() + + # Initialize request argument(s) + request = dataplex_v1.DeleteGlossaryRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_glossary(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END dataplex_v1_generated_BusinessGlossaryService_DeleteGlossary_async] diff --git a/packages/google-cloud-dataplex/samples/generated_samples/dataplex_v1_generated_business_glossary_service_delete_glossary_category_async.py b/packages/google-cloud-dataplex/samples/generated_samples/dataplex_v1_generated_business_glossary_service_delete_glossary_category_async.py new file mode 100644 index 000000000000..4e63aea64920 --- /dev/null +++ b/packages/google-cloud-dataplex/samples/generated_samples/dataplex_v1_generated_business_glossary_service_delete_glossary_category_async.py @@ -0,0 +1,50 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! 
+# +# Snippet for DeleteGlossaryCategory +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dataplex + + +# [START dataplex_v1_generated_BusinessGlossaryService_DeleteGlossaryCategory_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dataplex_v1 + + +async def sample_delete_glossary_category(): + # Create a client + client = dataplex_v1.BusinessGlossaryServiceAsyncClient() + + # Initialize request argument(s) + request = dataplex_v1.DeleteGlossaryCategoryRequest( + name="name_value", + ) + + # Make the request + await client.delete_glossary_category(request=request) + + +# [END dataplex_v1_generated_BusinessGlossaryService_DeleteGlossaryCategory_async] diff --git a/packages/google-cloud-dataplex/samples/generated_samples/dataplex_v1_generated_business_glossary_service_delete_glossary_category_sync.py b/packages/google-cloud-dataplex/samples/generated_samples/dataplex_v1_generated_business_glossary_service_delete_glossary_category_sync.py new file mode 100644 index 000000000000..129f5ee68142 --- /dev/null +++ b/packages/google-cloud-dataplex/samples/generated_samples/dataplex_v1_generated_business_glossary_service_delete_glossary_category_sync.py @@ -0,0 +1,50 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteGlossaryCategory +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dataplex + + +# [START dataplex_v1_generated_BusinessGlossaryService_DeleteGlossaryCategory_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dataplex_v1 + + +def sample_delete_glossary_category(): + # Create a client + client = dataplex_v1.BusinessGlossaryServiceClient() + + # Initialize request argument(s) + request = dataplex_v1.DeleteGlossaryCategoryRequest( + name="name_value", + ) + + # Make the request + client.delete_glossary_category(request=request) + + +# [END dataplex_v1_generated_BusinessGlossaryService_DeleteGlossaryCategory_sync] diff --git a/packages/google-cloud-dataplex/samples/generated_samples/dataplex_v1_generated_business_glossary_service_delete_glossary_sync.py b/packages/google-cloud-dataplex/samples/generated_samples/dataplex_v1_generated_business_glossary_service_delete_glossary_sync.py new file mode 100644 index 000000000000..ce878bcc195e --- /dev/null +++ b/packages/google-cloud-dataplex/samples/generated_samples/dataplex_v1_generated_business_glossary_service_delete_glossary_sync.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteGlossary +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dataplex + + +# [START dataplex_v1_generated_BusinessGlossaryService_DeleteGlossary_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dataplex_v1 + + +def sample_delete_glossary(): + # Create a client + client = dataplex_v1.BusinessGlossaryServiceClient() + + # Initialize request argument(s) + request = dataplex_v1.DeleteGlossaryRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_glossary(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END dataplex_v1_generated_BusinessGlossaryService_DeleteGlossary_sync] diff --git a/packages/google-cloud-dataplex/samples/generated_samples/dataplex_v1_generated_business_glossary_service_delete_glossary_term_async.py b/packages/google-cloud-dataplex/samples/generated_samples/dataplex_v1_generated_business_glossary_service_delete_glossary_term_async.py new file mode 100644 index 000000000000..bea27d33034d --- /dev/null +++ b/packages/google-cloud-dataplex/samples/generated_samples/dataplex_v1_generated_business_glossary_service_delete_glossary_term_async.py @@ -0,0 +1,50 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteGlossaryTerm +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dataplex + + +# [START dataplex_v1_generated_BusinessGlossaryService_DeleteGlossaryTerm_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dataplex_v1 + + +async def sample_delete_glossary_term(): + # Create a client + client = dataplex_v1.BusinessGlossaryServiceAsyncClient() + + # Initialize request argument(s) + request = dataplex_v1.DeleteGlossaryTermRequest( + name="name_value", + ) + + # Make the request + await client.delete_glossary_term(request=request) + + +# [END dataplex_v1_generated_BusinessGlossaryService_DeleteGlossaryTerm_async] diff --git a/packages/google-cloud-dataplex/samples/generated_samples/dataplex_v1_generated_business_glossary_service_delete_glossary_term_sync.py b/packages/google-cloud-dataplex/samples/generated_samples/dataplex_v1_generated_business_glossary_service_delete_glossary_term_sync.py new file mode 100644 index 000000000000..87c4ba532889 --- /dev/null +++ b/packages/google-cloud-dataplex/samples/generated_samples/dataplex_v1_generated_business_glossary_service_delete_glossary_term_sync.py @@ -0,0 +1,50 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteGlossaryTerm +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dataplex + + +# [START dataplex_v1_generated_BusinessGlossaryService_DeleteGlossaryTerm_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dataplex_v1 + + +def sample_delete_glossary_term(): + # Create a client + client = dataplex_v1.BusinessGlossaryServiceClient() + + # Initialize request argument(s) + request = dataplex_v1.DeleteGlossaryTermRequest( + name="name_value", + ) + + # Make the request + client.delete_glossary_term(request=request) + + +# [END dataplex_v1_generated_BusinessGlossaryService_DeleteGlossaryTerm_sync] diff --git a/packages/google-cloud-dataplex/samples/generated_samples/dataplex_v1_generated_business_glossary_service_get_glossary_async.py b/packages/google-cloud-dataplex/samples/generated_samples/dataplex_v1_generated_business_glossary_service_get_glossary_async.py new file mode 100644 index 000000000000..b46e94009514 --- /dev/null +++ b/packages/google-cloud-dataplex/samples/generated_samples/dataplex_v1_generated_business_glossary_service_get_glossary_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetGlossary +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dataplex + + +# [START dataplex_v1_generated_BusinessGlossaryService_GetGlossary_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dataplex_v1 + + +async def sample_get_glossary(): + # Create a client + client = dataplex_v1.BusinessGlossaryServiceAsyncClient() + + # Initialize request argument(s) + request = dataplex_v1.GetGlossaryRequest( + name="name_value", + ) + + # Make the request + response = await client.get_glossary(request=request) + + # Handle the response + print(response) + +# [END dataplex_v1_generated_BusinessGlossaryService_GetGlossary_async] diff --git a/packages/google-cloud-dataplex/samples/generated_samples/dataplex_v1_generated_business_glossary_service_get_glossary_category_async.py b/packages/google-cloud-dataplex/samples/generated_samples/dataplex_v1_generated_business_glossary_service_get_glossary_category_async.py new file mode 100644 index 000000000000..7524e741a51d --- /dev/null +++ b/packages/google-cloud-dataplex/samples/generated_samples/dataplex_v1_generated_business_glossary_service_get_glossary_category_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetGlossaryCategory +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dataplex + + +# [START dataplex_v1_generated_BusinessGlossaryService_GetGlossaryCategory_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dataplex_v1 + + +async def sample_get_glossary_category(): + # Create a client + client = dataplex_v1.BusinessGlossaryServiceAsyncClient() + + # Initialize request argument(s) + request = dataplex_v1.GetGlossaryCategoryRequest( + name="name_value", + ) + + # Make the request + response = await client.get_glossary_category(request=request) + + # Handle the response + print(response) + +# [END dataplex_v1_generated_BusinessGlossaryService_GetGlossaryCategory_async] diff --git a/packages/google-cloud-dataplex/samples/generated_samples/dataplex_v1_generated_business_glossary_service_get_glossary_category_sync.py b/packages/google-cloud-dataplex/samples/generated_samples/dataplex_v1_generated_business_glossary_service_get_glossary_category_sync.py new file mode 100644 index 000000000000..43e3d9ec99a5 --- /dev/null +++ b/packages/google-cloud-dataplex/samples/generated_samples/dataplex_v1_generated_business_glossary_service_get_glossary_category_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetGlossaryCategory +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dataplex + + +# [START dataplex_v1_generated_BusinessGlossaryService_GetGlossaryCategory_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dataplex_v1 + + +def sample_get_glossary_category(): + # Create a client + client = dataplex_v1.BusinessGlossaryServiceClient() + + # Initialize request argument(s) + request = dataplex_v1.GetGlossaryCategoryRequest( + name="name_value", + ) + + # Make the request + response = client.get_glossary_category(request=request) + + # Handle the response + print(response) + +# [END dataplex_v1_generated_BusinessGlossaryService_GetGlossaryCategory_sync] diff --git a/packages/google-cloud-dataplex/samples/generated_samples/dataplex_v1_generated_business_glossary_service_get_glossary_sync.py b/packages/google-cloud-dataplex/samples/generated_samples/dataplex_v1_generated_business_glossary_service_get_glossary_sync.py new file mode 100644 index 000000000000..943a5534d01a --- /dev/null +++ b/packages/google-cloud-dataplex/samples/generated_samples/dataplex_v1_generated_business_glossary_service_get_glossary_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetGlossary +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dataplex + + +# [START dataplex_v1_generated_BusinessGlossaryService_GetGlossary_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dataplex_v1 + + +def sample_get_glossary(): + # Create a client + client = dataplex_v1.BusinessGlossaryServiceClient() + + # Initialize request argument(s) + request = dataplex_v1.GetGlossaryRequest( + name="name_value", + ) + + # Make the request + response = client.get_glossary(request=request) + + # Handle the response + print(response) + +# [END dataplex_v1_generated_BusinessGlossaryService_GetGlossary_sync] diff --git a/packages/google-cloud-dataplex/samples/generated_samples/dataplex_v1_generated_business_glossary_service_get_glossary_term_async.py b/packages/google-cloud-dataplex/samples/generated_samples/dataplex_v1_generated_business_glossary_service_get_glossary_term_async.py new file mode 100644 index 000000000000..baf885229bbc --- /dev/null +++ b/packages/google-cloud-dataplex/samples/generated_samples/dataplex_v1_generated_business_glossary_service_get_glossary_term_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetGlossaryTerm +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dataplex + + +# [START dataplex_v1_generated_BusinessGlossaryService_GetGlossaryTerm_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dataplex_v1 + + +async def sample_get_glossary_term(): + # Create a client + client = dataplex_v1.BusinessGlossaryServiceAsyncClient() + + # Initialize request argument(s) + request = dataplex_v1.GetGlossaryTermRequest( + name="name_value", + ) + + # Make the request + response = await client.get_glossary_term(request=request) + + # Handle the response + print(response) + +# [END dataplex_v1_generated_BusinessGlossaryService_GetGlossaryTerm_async] diff --git a/packages/google-cloud-dataplex/samples/generated_samples/dataplex_v1_generated_business_glossary_service_get_glossary_term_sync.py b/packages/google-cloud-dataplex/samples/generated_samples/dataplex_v1_generated_business_glossary_service_get_glossary_term_sync.py new file mode 100644 index 000000000000..3b2368612e89 --- /dev/null +++ b/packages/google-cloud-dataplex/samples/generated_samples/dataplex_v1_generated_business_glossary_service_get_glossary_term_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetGlossaryTerm +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dataplex + + +# [START dataplex_v1_generated_BusinessGlossaryService_GetGlossaryTerm_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dataplex_v1 + + +def sample_get_glossary_term(): + # Create a client + client = dataplex_v1.BusinessGlossaryServiceClient() + + # Initialize request argument(s) + request = dataplex_v1.GetGlossaryTermRequest( + name="name_value", + ) + + # Make the request + response = client.get_glossary_term(request=request) + + # Handle the response + print(response) + +# [END dataplex_v1_generated_BusinessGlossaryService_GetGlossaryTerm_sync] diff --git a/packages/google-cloud-dataplex/samples/generated_samples/dataplex_v1_generated_business_glossary_service_list_glossaries_async.py b/packages/google-cloud-dataplex/samples/generated_samples/dataplex_v1_generated_business_glossary_service_list_glossaries_async.py new file mode 100644 index 000000000000..d9dbd0f0767c --- /dev/null +++ b/packages/google-cloud-dataplex/samples/generated_samples/dataplex_v1_generated_business_glossary_service_list_glossaries_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListGlossaries +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dataplex + + +# [START dataplex_v1_generated_BusinessGlossaryService_ListGlossaries_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dataplex_v1 + + +async def sample_list_glossaries(): + # Create a client + client = dataplex_v1.BusinessGlossaryServiceAsyncClient() + + # Initialize request argument(s) + request = dataplex_v1.ListGlossariesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_glossaries(request=request) + + # Handle the response + async for response in page_result: + print(response) + +# [END dataplex_v1_generated_BusinessGlossaryService_ListGlossaries_async] diff --git a/packages/google-cloud-dataplex/samples/generated_samples/dataplex_v1_generated_business_glossary_service_list_glossaries_sync.py b/packages/google-cloud-dataplex/samples/generated_samples/dataplex_v1_generated_business_glossary_service_list_glossaries_sync.py new file mode 100644 index 000000000000..1b2469dcf0b9 --- /dev/null +++ b/packages/google-cloud-dataplex/samples/generated_samples/dataplex_v1_generated_business_glossary_service_list_glossaries_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListGlossaries +# NOTE: This snippet has been automatically generated for illustrative purposes only. 
+# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dataplex + + +# [START dataplex_v1_generated_BusinessGlossaryService_ListGlossaries_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dataplex_v1 + + +def sample_list_glossaries(): + # Create a client + client = dataplex_v1.BusinessGlossaryServiceClient() + + # Initialize request argument(s) + request = dataplex_v1.ListGlossariesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_glossaries(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END dataplex_v1_generated_BusinessGlossaryService_ListGlossaries_sync] diff --git a/packages/google-cloud-dataplex/samples/generated_samples/dataplex_v1_generated_business_glossary_service_list_glossary_categories_async.py b/packages/google-cloud-dataplex/samples/generated_samples/dataplex_v1_generated_business_glossary_service_list_glossary_categories_async.py new file mode 100644 index 000000000000..14d77cb5f9a3 --- /dev/null +++ b/packages/google-cloud-dataplex/samples/generated_samples/dataplex_v1_generated_business_glossary_service_list_glossary_categories_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListGlossaryCategories +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dataplex + + +# [START dataplex_v1_generated_BusinessGlossaryService_ListGlossaryCategories_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dataplex_v1 + + +async def sample_list_glossary_categories(): + # Create a client + client = dataplex_v1.BusinessGlossaryServiceAsyncClient() + + # Initialize request argument(s) + request = dataplex_v1.ListGlossaryCategoriesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_glossary_categories(request=request) + + # Handle the response + async for response in page_result: + print(response) + +# [END dataplex_v1_generated_BusinessGlossaryService_ListGlossaryCategories_async] diff --git a/packages/google-cloud-dataplex/samples/generated_samples/dataplex_v1_generated_business_glossary_service_list_glossary_categories_sync.py b/packages/google-cloud-dataplex/samples/generated_samples/dataplex_v1_generated_business_glossary_service_list_glossary_categories_sync.py new file mode 100644 index 000000000000..357b89a4a77c --- /dev/null +++ b/packages/google-cloud-dataplex/samples/generated_samples/dataplex_v1_generated_business_glossary_service_list_glossary_categories_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! 
+# +# Snippet for ListGlossaryCategories +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dataplex + + +# [START dataplex_v1_generated_BusinessGlossaryService_ListGlossaryCategories_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dataplex_v1 + + +def sample_list_glossary_categories(): + # Create a client + client = dataplex_v1.BusinessGlossaryServiceClient() + + # Initialize request argument(s) + request = dataplex_v1.ListGlossaryCategoriesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_glossary_categories(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END dataplex_v1_generated_BusinessGlossaryService_ListGlossaryCategories_sync] diff --git a/packages/google-cloud-dataplex/samples/generated_samples/dataplex_v1_generated_business_glossary_service_list_glossary_terms_async.py b/packages/google-cloud-dataplex/samples/generated_samples/dataplex_v1_generated_business_glossary_service_list_glossary_terms_async.py new file mode 100644 index 000000000000..d3a9df2e788d --- /dev/null +++ b/packages/google-cloud-dataplex/samples/generated_samples/dataplex_v1_generated_business_glossary_service_list_glossary_terms_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in 
compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListGlossaryTerms +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dataplex + + +# [START dataplex_v1_generated_BusinessGlossaryService_ListGlossaryTerms_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dataplex_v1 + + +async def sample_list_glossary_terms(): + # Create a client + client = dataplex_v1.BusinessGlossaryServiceAsyncClient() + + # Initialize request argument(s) + request = dataplex_v1.ListGlossaryTermsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_glossary_terms(request=request) + + # Handle the response + async for response in page_result: + print(response) + +# [END dataplex_v1_generated_BusinessGlossaryService_ListGlossaryTerms_async] diff --git a/packages/google-cloud-dataplex/samples/generated_samples/dataplex_v1_generated_business_glossary_service_list_glossary_terms_sync.py b/packages/google-cloud-dataplex/samples/generated_samples/dataplex_v1_generated_business_glossary_service_list_glossary_terms_sync.py new file mode 100644 index 000000000000..7f3d1a00bc6c --- /dev/null +++ b/packages/google-cloud-dataplex/samples/generated_samples/dataplex_v1_generated_business_glossary_service_list_glossary_terms_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListGlossaryTerms +# NOTE: This snippet has been automatically generated for illustrative purposes only. 
+# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dataplex + + +# [START dataplex_v1_generated_BusinessGlossaryService_ListGlossaryTerms_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dataplex_v1 + + +def sample_list_glossary_terms(): + # Create a client + client = dataplex_v1.BusinessGlossaryServiceClient() + + # Initialize request argument(s) + request = dataplex_v1.ListGlossaryTermsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_glossary_terms(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END dataplex_v1_generated_BusinessGlossaryService_ListGlossaryTerms_sync] diff --git a/packages/google-cloud-dataplex/samples/generated_samples/dataplex_v1_generated_business_glossary_service_update_glossary_async.py b/packages/google-cloud-dataplex/samples/generated_samples/dataplex_v1_generated_business_glossary_service_update_glossary_async.py new file mode 100644 index 000000000000..21cc26ce8a41 --- /dev/null +++ b/packages/google-cloud-dataplex/samples/generated_samples/dataplex_v1_generated_business_glossary_service_update_glossary_async.py @@ -0,0 +1,55 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateGlossary +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dataplex + + +# [START dataplex_v1_generated_BusinessGlossaryService_UpdateGlossary_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dataplex_v1 + + +async def sample_update_glossary(): + # Create a client + client = dataplex_v1.BusinessGlossaryServiceAsyncClient() + + # Initialize request argument(s) + request = dataplex_v1.UpdateGlossaryRequest( + ) + + # Make the request + operation = client.update_glossary(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + +# [END dataplex_v1_generated_BusinessGlossaryService_UpdateGlossary_async] diff --git a/packages/google-cloud-dataplex/samples/generated_samples/dataplex_v1_generated_business_glossary_service_update_glossary_category_async.py b/packages/google-cloud-dataplex/samples/generated_samples/dataplex_v1_generated_business_glossary_service_update_glossary_category_async.py new file mode 100644 index 000000000000..bc9f3931ebeb --- /dev/null +++ b/packages/google-cloud-dataplex/samples/generated_samples/dataplex_v1_generated_business_glossary_service_update_glossary_category_async.py @@ -0,0 +1,55 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! 
+# +# Snippet for UpdateGlossaryCategory +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dataplex + + +# [START dataplex_v1_generated_BusinessGlossaryService_UpdateGlossaryCategory_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dataplex_v1 + + +async def sample_update_glossary_category(): + # Create a client + client = dataplex_v1.BusinessGlossaryServiceAsyncClient() + + # Initialize request argument(s) + category = dataplex_v1.GlossaryCategory() + category.parent = "parent_value" + + request = dataplex_v1.UpdateGlossaryCategoryRequest( + category=category, + ) + + # Make the request + response = await client.update_glossary_category(request=request) + + # Handle the response + print(response) + +# [END dataplex_v1_generated_BusinessGlossaryService_UpdateGlossaryCategory_async] diff --git a/packages/google-cloud-dataplex/samples/generated_samples/dataplex_v1_generated_business_glossary_service_update_glossary_category_sync.py b/packages/google-cloud-dataplex/samples/generated_samples/dataplex_v1_generated_business_glossary_service_update_glossary_category_sync.py new file mode 100644 index 000000000000..27c16af104b4 --- /dev/null +++ b/packages/google-cloud-dataplex/samples/generated_samples/dataplex_v1_generated_business_glossary_service_update_glossary_category_sync.py @@ -0,0 +1,55 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, 
Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateGlossaryCategory +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dataplex + + +# [START dataplex_v1_generated_BusinessGlossaryService_UpdateGlossaryCategory_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dataplex_v1 + + +def sample_update_glossary_category(): + # Create a client + client = dataplex_v1.BusinessGlossaryServiceClient() + + # Initialize request argument(s) + category = dataplex_v1.GlossaryCategory() + category.parent = "parent_value" + + request = dataplex_v1.UpdateGlossaryCategoryRequest( + category=category, + ) + + # Make the request + response = client.update_glossary_category(request=request) + + # Handle the response + print(response) + +# [END dataplex_v1_generated_BusinessGlossaryService_UpdateGlossaryCategory_sync] diff --git a/packages/google-cloud-dataplex/samples/generated_samples/dataplex_v1_generated_business_glossary_service_update_glossary_sync.py b/packages/google-cloud-dataplex/samples/generated_samples/dataplex_v1_generated_business_glossary_service_update_glossary_sync.py new file mode 100644 index 000000000000..9e5cda83589d --- /dev/null +++ b/packages/google-cloud-dataplex/samples/generated_samples/dataplex_v1_generated_business_glossary_service_update_glossary_sync.py @@ -0,0 +1,55 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateGlossary +# NOTE: This snippet has been automatically generated for illustrative purposes only. 
+# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dataplex + + +# [START dataplex_v1_generated_BusinessGlossaryService_UpdateGlossary_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dataplex_v1 + + +def sample_update_glossary(): + # Create a client + client = dataplex_v1.BusinessGlossaryServiceClient() + + # Initialize request argument(s) + request = dataplex_v1.UpdateGlossaryRequest( + ) + + # Make the request + operation = client.update_glossary(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + +# [END dataplex_v1_generated_BusinessGlossaryService_UpdateGlossary_sync] diff --git a/packages/google-cloud-dataplex/samples/generated_samples/dataplex_v1_generated_business_glossary_service_update_glossary_term_async.py b/packages/google-cloud-dataplex/samples/generated_samples/dataplex_v1_generated_business_glossary_service_update_glossary_term_async.py new file mode 100644 index 000000000000..f1c0183067c4 --- /dev/null +++ b/packages/google-cloud-dataplex/samples/generated_samples/dataplex_v1_generated_business_glossary_service_update_glossary_term_async.py @@ -0,0 +1,55 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateGlossaryTerm +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dataplex + + +# [START dataplex_v1_generated_BusinessGlossaryService_UpdateGlossaryTerm_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dataplex_v1 + + +async def sample_update_glossary_term(): + # Create a client + client = dataplex_v1.BusinessGlossaryServiceAsyncClient() + + # Initialize request argument(s) + term = dataplex_v1.GlossaryTerm() + term.parent = "parent_value" + + request = dataplex_v1.UpdateGlossaryTermRequest( + term=term, + ) + + # Make the request + response = await client.update_glossary_term(request=request) + + # Handle the response + print(response) + +# [END dataplex_v1_generated_BusinessGlossaryService_UpdateGlossaryTerm_async] diff --git a/packages/google-cloud-dataplex/samples/generated_samples/dataplex_v1_generated_business_glossary_service_update_glossary_term_sync.py b/packages/google-cloud-dataplex/samples/generated_samples/dataplex_v1_generated_business_glossary_service_update_glossary_term_sync.py new file mode 100644 index 000000000000..f162ab6e6cd5 --- /dev/null +++ b/packages/google-cloud-dataplex/samples/generated_samples/dataplex_v1_generated_business_glossary_service_update_glossary_term_sync.py @@ -0,0 +1,55 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! 
+# +# Snippet for UpdateGlossaryTerm +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dataplex + + +# [START dataplex_v1_generated_BusinessGlossaryService_UpdateGlossaryTerm_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dataplex_v1 + + +def sample_update_glossary_term(): + # Create a client + client = dataplex_v1.BusinessGlossaryServiceClient() + + # Initialize request argument(s) + term = dataplex_v1.GlossaryTerm() + term.parent = "parent_value" + + request = dataplex_v1.UpdateGlossaryTermRequest( + term=term, + ) + + # Make the request + response = client.update_glossary_term(request=request) + + # Handle the response + print(response) + +# [END dataplex_v1_generated_BusinessGlossaryService_UpdateGlossaryTerm_sync] diff --git a/packages/google-cloud-dataplex/samples/generated_samples/dataplex_v1_generated_catalog_service_create_entry_link_async.py b/packages/google-cloud-dataplex/samples/generated_samples/dataplex_v1_generated_catalog_service_create_entry_link_async.py new file mode 100644 index 000000000000..11fdd293d63e --- /dev/null +++ b/packages/google-cloud-dataplex/samples/generated_samples/dataplex_v1_generated_catalog_service_create_entry_link_async.py @@ -0,0 +1,59 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateEntryLink +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dataplex + + +# [START dataplex_v1_generated_CatalogService_CreateEntryLink_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dataplex_v1 + + +async def sample_create_entry_link(): + # Create a client + client = dataplex_v1.CatalogServiceAsyncClient() + + # Initialize request argument(s) + entry_link = dataplex_v1.EntryLink() + entry_link.entry_link_type = "entry_link_type_value" + entry_link.entry_references.name = "name_value" + entry_link.entry_references.type_ = "TARGET" + + request = dataplex_v1.CreateEntryLinkRequest( + parent="parent_value", + entry_link_id="entry_link_id_value", + entry_link=entry_link, + ) + + # Make the request + response = await client.create_entry_link(request=request) + + # Handle the response + print(response) + +# [END dataplex_v1_generated_CatalogService_CreateEntryLink_async] diff --git a/packages/google-cloud-dataplex/samples/generated_samples/dataplex_v1_generated_catalog_service_create_entry_link_sync.py b/packages/google-cloud-dataplex/samples/generated_samples/dataplex_v1_generated_catalog_service_create_entry_link_sync.py new file mode 100644 index 000000000000..545fc9510ac8 --- /dev/null +++ b/packages/google-cloud-dataplex/samples/generated_samples/dataplex_v1_generated_catalog_service_create_entry_link_sync.py @@ -0,0 +1,59 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. 
DO NOT EDIT! +# +# Snippet for CreateEntryLink +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dataplex + + +# [START dataplex_v1_generated_CatalogService_CreateEntryLink_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dataplex_v1 + + +def sample_create_entry_link(): + # Create a client + client = dataplex_v1.CatalogServiceClient() + + # Initialize request argument(s) + entry_link = dataplex_v1.EntryLink() + entry_link.entry_link_type = "entry_link_type_value" + entry_link.entry_references.name = "name_value" + entry_link.entry_references.type_ = "TARGET" + + request = dataplex_v1.CreateEntryLinkRequest( + parent="parent_value", + entry_link_id="entry_link_id_value", + entry_link=entry_link, + ) + + # Make the request + response = client.create_entry_link(request=request) + + # Handle the response + print(response) + +# [END dataplex_v1_generated_CatalogService_CreateEntryLink_sync] diff --git a/packages/google-cloud-dataplex/samples/generated_samples/dataplex_v1_generated_catalog_service_delete_entry_link_async.py b/packages/google-cloud-dataplex/samples/generated_samples/dataplex_v1_generated_catalog_service_delete_entry_link_async.py new file mode 100644 index 000000000000..5f6e273420d5 --- /dev/null +++ b/packages/google-cloud-dataplex/samples/generated_samples/dataplex_v1_generated_catalog_service_delete_entry_link_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 
2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteEntryLink +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dataplex + + +# [START dataplex_v1_generated_CatalogService_DeleteEntryLink_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dataplex_v1 + + +async def sample_delete_entry_link(): + # Create a client + client = dataplex_v1.CatalogServiceAsyncClient() + + # Initialize request argument(s) + request = dataplex_v1.DeleteEntryLinkRequest( + name="name_value", + ) + + # Make the request + response = await client.delete_entry_link(request=request) + + # Handle the response + print(response) + +# [END dataplex_v1_generated_CatalogService_DeleteEntryLink_async] diff --git a/packages/google-cloud-dataplex/samples/generated_samples/dataplex_v1_generated_catalog_service_delete_entry_link_sync.py b/packages/google-cloud-dataplex/samples/generated_samples/dataplex_v1_generated_catalog_service_delete_entry_link_sync.py new file mode 100644 index 000000000000..9fe8fd9e84e0 --- /dev/null +++ b/packages/google-cloud-dataplex/samples/generated_samples/dataplex_v1_generated_catalog_service_delete_entry_link_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteEntryLink +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dataplex + + +# [START dataplex_v1_generated_CatalogService_DeleteEntryLink_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dataplex_v1 + + +def sample_delete_entry_link(): + # Create a client + client = dataplex_v1.CatalogServiceClient() + + # Initialize request argument(s) + request = dataplex_v1.DeleteEntryLinkRequest( + name="name_value", + ) + + # Make the request + response = client.delete_entry_link(request=request) + + # Handle the response + print(response) + +# [END dataplex_v1_generated_CatalogService_DeleteEntryLink_sync] diff --git a/packages/google-cloud-dataplex/samples/generated_samples/dataplex_v1_generated_catalog_service_get_entry_link_async.py b/packages/google-cloud-dataplex/samples/generated_samples/dataplex_v1_generated_catalog_service_get_entry_link_async.py new file mode 100644 index 000000000000..c60929bae482 --- /dev/null +++ b/packages/google-cloud-dataplex/samples/generated_samples/dataplex_v1_generated_catalog_service_get_entry_link_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetEntryLink +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dataplex + + +# [START dataplex_v1_generated_CatalogService_GetEntryLink_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dataplex_v1 + + +async def sample_get_entry_link(): + # Create a client + client = dataplex_v1.CatalogServiceAsyncClient() + + # Initialize request argument(s) + request = dataplex_v1.GetEntryLinkRequest( + name="name_value", + ) + + # Make the request + response = await client.get_entry_link(request=request) + + # Handle the response + print(response) + +# [END dataplex_v1_generated_CatalogService_GetEntryLink_async] diff --git a/packages/google-cloud-dataplex/samples/generated_samples/dataplex_v1_generated_catalog_service_get_entry_link_sync.py b/packages/google-cloud-dataplex/samples/generated_samples/dataplex_v1_generated_catalog_service_get_entry_link_sync.py new file mode 100644 index 000000000000..5c090b4900f2 --- /dev/null +++ b/packages/google-cloud-dataplex/samples/generated_samples/dataplex_v1_generated_catalog_service_get_entry_link_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the 
License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetEntryLink +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-dataplex + + +# [START dataplex_v1_generated_CatalogService_GetEntryLink_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import dataplex_v1 + + +def sample_get_entry_link(): + # Create a client + client = dataplex_v1.CatalogServiceClient() + + # Initialize request argument(s) + request = dataplex_v1.GetEntryLinkRequest( + name="name_value", + ) + + # Make the request + response = client.get_entry_link(request=request) + + # Handle the response + print(response) + +# [END dataplex_v1_generated_CatalogService_GetEntryLink_sync] diff --git a/packages/google-cloud-dataplex/samples/generated_samples/snippet_metadata_google.cloud.dataplex.v1.json b/packages/google-cloud-dataplex/samples/generated_samples/snippet_metadata_google.cloud.dataplex.v1.json index bba5daaf9f93..a12a3f36de1e 100644 --- a/packages/google-cloud-dataplex/samples/generated_samples/snippet_metadata_google.cloud.dataplex.v1.json +++ b/packages/google-cloud-dataplex/samples/generated_samples/snippet_metadata_google.cloud.dataplex.v1.json @@ -11,6 +11,2481 @@ "version": "0.1.0" }, "snippets": [ + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dataplex_v1.BusinessGlossaryServiceAsyncClient", + "shortName": "BusinessGlossaryServiceAsyncClient" + }, + "fullName": "google.cloud.dataplex_v1.BusinessGlossaryServiceAsyncClient.create_glossary_category", + "method": { + "fullName": "google.cloud.dataplex.v1.BusinessGlossaryService.CreateGlossaryCategory", + "service": { + "fullName": "google.cloud.dataplex.v1.BusinessGlossaryService", + "shortName": "BusinessGlossaryService" + }, + "shortName": "CreateGlossaryCategory" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.CreateGlossaryCategoryRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "category", + "type": "google.cloud.dataplex_v1.types.GlossaryCategory" + }, + { + 
"name": "category_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.dataplex_v1.types.GlossaryCategory", + "shortName": "create_glossary_category" + }, + "description": "Sample for CreateGlossaryCategory", + "file": "dataplex_v1_generated_business_glossary_service_create_glossary_category_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_BusinessGlossaryService_CreateGlossaryCategory_async", + "segments": [ + { + "end": 56, + "start": 27, + "type": "FULL" + }, + { + "end": 56, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 50, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 53, + "start": 51, + "type": "REQUEST_EXECUTION" + }, + { + "end": 57, + "start": 54, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_business_glossary_service_create_glossary_category_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dataplex_v1.BusinessGlossaryServiceClient", + "shortName": "BusinessGlossaryServiceClient" + }, + "fullName": "google.cloud.dataplex_v1.BusinessGlossaryServiceClient.create_glossary_category", + "method": { + "fullName": "google.cloud.dataplex.v1.BusinessGlossaryService.CreateGlossaryCategory", + "service": { + "fullName": "google.cloud.dataplex.v1.BusinessGlossaryService", + "shortName": "BusinessGlossaryService" + }, + "shortName": "CreateGlossaryCategory" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.CreateGlossaryCategoryRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "category", + "type": "google.cloud.dataplex_v1.types.GlossaryCategory" + }, + { + "name": 
"category_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.dataplex_v1.types.GlossaryCategory", + "shortName": "create_glossary_category" + }, + "description": "Sample for CreateGlossaryCategory", + "file": "dataplex_v1_generated_business_glossary_service_create_glossary_category_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_BusinessGlossaryService_CreateGlossaryCategory_sync", + "segments": [ + { + "end": 56, + "start": 27, + "type": "FULL" + }, + { + "end": 56, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 50, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 53, + "start": 51, + "type": "REQUEST_EXECUTION" + }, + { + "end": 57, + "start": 54, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_business_glossary_service_create_glossary_category_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dataplex_v1.BusinessGlossaryServiceAsyncClient", + "shortName": "BusinessGlossaryServiceAsyncClient" + }, + "fullName": "google.cloud.dataplex_v1.BusinessGlossaryServiceAsyncClient.create_glossary_term", + "method": { + "fullName": "google.cloud.dataplex.v1.BusinessGlossaryService.CreateGlossaryTerm", + "service": { + "fullName": "google.cloud.dataplex.v1.BusinessGlossaryService", + "shortName": "BusinessGlossaryService" + }, + "shortName": "CreateGlossaryTerm" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.CreateGlossaryTermRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "term", + "type": "google.cloud.dataplex_v1.types.GlossaryTerm" + }, + { + "name": "term_id", + 
"type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.dataplex_v1.types.GlossaryTerm", + "shortName": "create_glossary_term" + }, + "description": "Sample for CreateGlossaryTerm", + "file": "dataplex_v1_generated_business_glossary_service_create_glossary_term_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_BusinessGlossaryService_CreateGlossaryTerm_async", + "segments": [ + { + "end": 56, + "start": 27, + "type": "FULL" + }, + { + "end": 56, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 50, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 53, + "start": 51, + "type": "REQUEST_EXECUTION" + }, + { + "end": 57, + "start": 54, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_business_glossary_service_create_glossary_term_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dataplex_v1.BusinessGlossaryServiceClient", + "shortName": "BusinessGlossaryServiceClient" + }, + "fullName": "google.cloud.dataplex_v1.BusinessGlossaryServiceClient.create_glossary_term", + "method": { + "fullName": "google.cloud.dataplex.v1.BusinessGlossaryService.CreateGlossaryTerm", + "service": { + "fullName": "google.cloud.dataplex.v1.BusinessGlossaryService", + "shortName": "BusinessGlossaryService" + }, + "shortName": "CreateGlossaryTerm" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.CreateGlossaryTermRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "term", + "type": "google.cloud.dataplex_v1.types.GlossaryTerm" + }, + { + "name": "term_id", + "type": "str" + }, + { + "name": "retry", + "type": 
"google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.dataplex_v1.types.GlossaryTerm", + "shortName": "create_glossary_term" + }, + "description": "Sample for CreateGlossaryTerm", + "file": "dataplex_v1_generated_business_glossary_service_create_glossary_term_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_BusinessGlossaryService_CreateGlossaryTerm_sync", + "segments": [ + { + "end": 56, + "start": 27, + "type": "FULL" + }, + { + "end": 56, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 50, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 53, + "start": 51, + "type": "REQUEST_EXECUTION" + }, + { + "end": 57, + "start": 54, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_business_glossary_service_create_glossary_term_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dataplex_v1.BusinessGlossaryServiceAsyncClient", + "shortName": "BusinessGlossaryServiceAsyncClient" + }, + "fullName": "google.cloud.dataplex_v1.BusinessGlossaryServiceAsyncClient.create_glossary", + "method": { + "fullName": "google.cloud.dataplex.v1.BusinessGlossaryService.CreateGlossary", + "service": { + "fullName": "google.cloud.dataplex.v1.BusinessGlossaryService", + "shortName": "BusinessGlossaryService" + }, + "shortName": "CreateGlossary" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.CreateGlossaryRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "glossary", + "type": "google.cloud.dataplex_v1.types.Glossary" + }, + { + "name": "glossary_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": 
"timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "create_glossary" + }, + "description": "Sample for CreateGlossary", + "file": "dataplex_v1_generated_business_glossary_service_create_glossary_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_BusinessGlossaryService_CreateGlossary_async", + "segments": [ + { + "end": 56, + "start": 27, + "type": "FULL" + }, + { + "end": 56, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 53, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 57, + "start": 54, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_business_glossary_service_create_glossary_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dataplex_v1.BusinessGlossaryServiceClient", + "shortName": "BusinessGlossaryServiceClient" + }, + "fullName": "google.cloud.dataplex_v1.BusinessGlossaryServiceClient.create_glossary", + "method": { + "fullName": "google.cloud.dataplex.v1.BusinessGlossaryService.CreateGlossary", + "service": { + "fullName": "google.cloud.dataplex.v1.BusinessGlossaryService", + "shortName": "BusinessGlossaryService" + }, + "shortName": "CreateGlossary" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.CreateGlossaryRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "glossary", + "type": "google.cloud.dataplex_v1.types.Glossary" + }, + { + "name": "glossary_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, 
bytes]]]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "create_glossary" + }, + "description": "Sample for CreateGlossary", + "file": "dataplex_v1_generated_business_glossary_service_create_glossary_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_BusinessGlossaryService_CreateGlossary_sync", + "segments": [ + { + "end": 56, + "start": 27, + "type": "FULL" + }, + { + "end": 56, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 53, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 57, + "start": 54, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_business_glossary_service_create_glossary_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dataplex_v1.BusinessGlossaryServiceAsyncClient", + "shortName": "BusinessGlossaryServiceAsyncClient" + }, + "fullName": "google.cloud.dataplex_v1.BusinessGlossaryServiceAsyncClient.delete_glossary_category", + "method": { + "fullName": "google.cloud.dataplex.v1.BusinessGlossaryService.DeleteGlossaryCategory", + "service": { + "fullName": "google.cloud.dataplex.v1.BusinessGlossaryService", + "shortName": "BusinessGlossaryService" + }, + "shortName": "DeleteGlossaryCategory" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.DeleteGlossaryCategoryRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "shortName": "delete_glossary_category" + }, + "description": "Sample for DeleteGlossaryCategory", + "file": 
"dataplex_v1_generated_business_glossary_service_delete_glossary_category_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_BusinessGlossaryService_DeleteGlossaryCategory_async", + "segments": [ + { + "end": 49, + "start": 27, + "type": "FULL" + }, + { + "end": 49, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_business_glossary_service_delete_glossary_category_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dataplex_v1.BusinessGlossaryServiceClient", + "shortName": "BusinessGlossaryServiceClient" + }, + "fullName": "google.cloud.dataplex_v1.BusinessGlossaryServiceClient.delete_glossary_category", + "method": { + "fullName": "google.cloud.dataplex.v1.BusinessGlossaryService.DeleteGlossaryCategory", + "service": { + "fullName": "google.cloud.dataplex.v1.BusinessGlossaryService", + "shortName": "BusinessGlossaryService" + }, + "shortName": "DeleteGlossaryCategory" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.DeleteGlossaryCategoryRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "shortName": "delete_glossary_category" + }, + "description": "Sample for DeleteGlossaryCategory", + "file": "dataplex_v1_generated_business_glossary_service_delete_glossary_category_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_BusinessGlossaryService_DeleteGlossaryCategory_sync", + "segments": [ + { + 
"end": 49, + "start": 27, + "type": "FULL" + }, + { + "end": 49, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_business_glossary_service_delete_glossary_category_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dataplex_v1.BusinessGlossaryServiceAsyncClient", + "shortName": "BusinessGlossaryServiceAsyncClient" + }, + "fullName": "google.cloud.dataplex_v1.BusinessGlossaryServiceAsyncClient.delete_glossary_term", + "method": { + "fullName": "google.cloud.dataplex.v1.BusinessGlossaryService.DeleteGlossaryTerm", + "service": { + "fullName": "google.cloud.dataplex.v1.BusinessGlossaryService", + "shortName": "BusinessGlossaryService" + }, + "shortName": "DeleteGlossaryTerm" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.DeleteGlossaryTermRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "shortName": "delete_glossary_term" + }, + "description": "Sample for DeleteGlossaryTerm", + "file": "dataplex_v1_generated_business_glossary_service_delete_glossary_term_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_BusinessGlossaryService_DeleteGlossaryTerm_async", + "segments": [ + { + "end": 49, + "start": 27, + "type": "FULL" + }, + { + "end": 49, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + 
"start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_business_glossary_service_delete_glossary_term_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dataplex_v1.BusinessGlossaryServiceClient", + "shortName": "BusinessGlossaryServiceClient" + }, + "fullName": "google.cloud.dataplex_v1.BusinessGlossaryServiceClient.delete_glossary_term", + "method": { + "fullName": "google.cloud.dataplex.v1.BusinessGlossaryService.DeleteGlossaryTerm", + "service": { + "fullName": "google.cloud.dataplex.v1.BusinessGlossaryService", + "shortName": "BusinessGlossaryService" + }, + "shortName": "DeleteGlossaryTerm" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.DeleteGlossaryTermRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "shortName": "delete_glossary_term" + }, + "description": "Sample for DeleteGlossaryTerm", + "file": "dataplex_v1_generated_business_glossary_service_delete_glossary_term_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_BusinessGlossaryService_DeleteGlossaryTerm_sync", + "segments": [ + { + "end": 49, + "start": 27, + "type": "FULL" + }, + { + "end": 49, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_business_glossary_service_delete_glossary_term_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": 
"google.cloud.dataplex_v1.BusinessGlossaryServiceAsyncClient", + "shortName": "BusinessGlossaryServiceAsyncClient" + }, + "fullName": "google.cloud.dataplex_v1.BusinessGlossaryServiceAsyncClient.delete_glossary", + "method": { + "fullName": "google.cloud.dataplex.v1.BusinessGlossaryService.DeleteGlossary", + "service": { + "fullName": "google.cloud.dataplex.v1.BusinessGlossaryService", + "shortName": "BusinessGlossaryService" + }, + "shortName": "DeleteGlossary" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.DeleteGlossaryRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "delete_glossary" + }, + "description": "Sample for DeleteGlossary", + "file": "dataplex_v1_generated_business_glossary_service_delete_glossary_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_BusinessGlossaryService_DeleteGlossary_async", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_business_glossary_service_delete_glossary_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dataplex_v1.BusinessGlossaryServiceClient", + "shortName": "BusinessGlossaryServiceClient" + }, + "fullName": "google.cloud.dataplex_v1.BusinessGlossaryServiceClient.delete_glossary", + "method": { + 
"fullName": "google.cloud.dataplex.v1.BusinessGlossaryService.DeleteGlossary", + "service": { + "fullName": "google.cloud.dataplex.v1.BusinessGlossaryService", + "shortName": "BusinessGlossaryService" + }, + "shortName": "DeleteGlossary" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.DeleteGlossaryRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "delete_glossary" + }, + "description": "Sample for DeleteGlossary", + "file": "dataplex_v1_generated_business_glossary_service_delete_glossary_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_BusinessGlossaryService_DeleteGlossary_sync", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_business_glossary_service_delete_glossary_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dataplex_v1.BusinessGlossaryServiceAsyncClient", + "shortName": "BusinessGlossaryServiceAsyncClient" + }, + "fullName": "google.cloud.dataplex_v1.BusinessGlossaryServiceAsyncClient.get_glossary_category", + "method": { + "fullName": "google.cloud.dataplex.v1.BusinessGlossaryService.GetGlossaryCategory", + "service": { + "fullName": "google.cloud.dataplex.v1.BusinessGlossaryService", + "shortName": "BusinessGlossaryService" + }, + 
"shortName": "GetGlossaryCategory" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.GetGlossaryCategoryRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.dataplex_v1.types.GlossaryCategory", + "shortName": "get_glossary_category" + }, + "description": "Sample for GetGlossaryCategory", + "file": "dataplex_v1_generated_business_glossary_service_get_glossary_category_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_BusinessGlossaryService_GetGlossaryCategory_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_business_glossary_service_get_glossary_category_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dataplex_v1.BusinessGlossaryServiceClient", + "shortName": "BusinessGlossaryServiceClient" + }, + "fullName": "google.cloud.dataplex_v1.BusinessGlossaryServiceClient.get_glossary_category", + "method": { + "fullName": "google.cloud.dataplex.v1.BusinessGlossaryService.GetGlossaryCategory", + "service": { + "fullName": "google.cloud.dataplex.v1.BusinessGlossaryService", + "shortName": "BusinessGlossaryService" + }, + "shortName": "GetGlossaryCategory" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.GetGlossaryCategoryRequest" + }, + { + "name": "name", + 
"type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.dataplex_v1.types.GlossaryCategory", + "shortName": "get_glossary_category" + }, + "description": "Sample for GetGlossaryCategory", + "file": "dataplex_v1_generated_business_glossary_service_get_glossary_category_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_BusinessGlossaryService_GetGlossaryCategory_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_business_glossary_service_get_glossary_category_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dataplex_v1.BusinessGlossaryServiceAsyncClient", + "shortName": "BusinessGlossaryServiceAsyncClient" + }, + "fullName": "google.cloud.dataplex_v1.BusinessGlossaryServiceAsyncClient.get_glossary_term", + "method": { + "fullName": "google.cloud.dataplex.v1.BusinessGlossaryService.GetGlossaryTerm", + "service": { + "fullName": "google.cloud.dataplex.v1.BusinessGlossaryService", + "shortName": "BusinessGlossaryService" + }, + "shortName": "GetGlossaryTerm" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.GetGlossaryTermRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": 
"Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.dataplex_v1.types.GlossaryTerm", + "shortName": "get_glossary_term" + }, + "description": "Sample for GetGlossaryTerm", + "file": "dataplex_v1_generated_business_glossary_service_get_glossary_term_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_BusinessGlossaryService_GetGlossaryTerm_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_business_glossary_service_get_glossary_term_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dataplex_v1.BusinessGlossaryServiceClient", + "shortName": "BusinessGlossaryServiceClient" + }, + "fullName": "google.cloud.dataplex_v1.BusinessGlossaryServiceClient.get_glossary_term", + "method": { + "fullName": "google.cloud.dataplex.v1.BusinessGlossaryService.GetGlossaryTerm", + "service": { + "fullName": "google.cloud.dataplex.v1.BusinessGlossaryService", + "shortName": "BusinessGlossaryService" + }, + "shortName": "GetGlossaryTerm" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.GetGlossaryTermRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.dataplex_v1.types.GlossaryTerm", + "shortName": "get_glossary_term" + }, + "description": "Sample for GetGlossaryTerm", + "file": 
"dataplex_v1_generated_business_glossary_service_get_glossary_term_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_BusinessGlossaryService_GetGlossaryTerm_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_business_glossary_service_get_glossary_term_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dataplex_v1.BusinessGlossaryServiceAsyncClient", + "shortName": "BusinessGlossaryServiceAsyncClient" + }, + "fullName": "google.cloud.dataplex_v1.BusinessGlossaryServiceAsyncClient.get_glossary", + "method": { + "fullName": "google.cloud.dataplex.v1.BusinessGlossaryService.GetGlossary", + "service": { + "fullName": "google.cloud.dataplex.v1.BusinessGlossaryService", + "shortName": "BusinessGlossaryService" + }, + "shortName": "GetGlossary" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.GetGlossaryRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.dataplex_v1.types.Glossary", + "shortName": "get_glossary" + }, + "description": "Sample for GetGlossary", + "file": "dataplex_v1_generated_business_glossary_service_get_glossary_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_BusinessGlossaryService_GetGlossary_async", + "segments": [ + { 
+ "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_business_glossary_service_get_glossary_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dataplex_v1.BusinessGlossaryServiceClient", + "shortName": "BusinessGlossaryServiceClient" + }, + "fullName": "google.cloud.dataplex_v1.BusinessGlossaryServiceClient.get_glossary", + "method": { + "fullName": "google.cloud.dataplex.v1.BusinessGlossaryService.GetGlossary", + "service": { + "fullName": "google.cloud.dataplex.v1.BusinessGlossaryService", + "shortName": "BusinessGlossaryService" + }, + "shortName": "GetGlossary" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.GetGlossaryRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.dataplex_v1.types.Glossary", + "shortName": "get_glossary" + }, + "description": "Sample for GetGlossary", + "file": "dataplex_v1_generated_business_glossary_service_get_glossary_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_BusinessGlossaryService_GetGlossary_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + 
"start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_business_glossary_service_get_glossary_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dataplex_v1.BusinessGlossaryServiceAsyncClient", + "shortName": "BusinessGlossaryServiceAsyncClient" + }, + "fullName": "google.cloud.dataplex_v1.BusinessGlossaryServiceAsyncClient.list_glossaries", + "method": { + "fullName": "google.cloud.dataplex.v1.BusinessGlossaryService.ListGlossaries", + "service": { + "fullName": "google.cloud.dataplex.v1.BusinessGlossaryService", + "shortName": "BusinessGlossaryService" + }, + "shortName": "ListGlossaries" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.ListGlossariesRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.dataplex_v1.services.business_glossary_service.pagers.ListGlossariesAsyncPager", + "shortName": "list_glossaries" + }, + "description": "Sample for ListGlossaries", + "file": "dataplex_v1_generated_business_glossary_service_list_glossaries_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_BusinessGlossaryService_ListGlossaries_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": 
"dataplex_v1_generated_business_glossary_service_list_glossaries_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dataplex_v1.BusinessGlossaryServiceClient", + "shortName": "BusinessGlossaryServiceClient" + }, + "fullName": "google.cloud.dataplex_v1.BusinessGlossaryServiceClient.list_glossaries", + "method": { + "fullName": "google.cloud.dataplex.v1.BusinessGlossaryService.ListGlossaries", + "service": { + "fullName": "google.cloud.dataplex.v1.BusinessGlossaryService", + "shortName": "BusinessGlossaryService" + }, + "shortName": "ListGlossaries" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.ListGlossariesRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.dataplex_v1.services.business_glossary_service.pagers.ListGlossariesPager", + "shortName": "list_glossaries" + }, + "description": "Sample for ListGlossaries", + "file": "dataplex_v1_generated_business_glossary_service_list_glossaries_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_BusinessGlossaryService_ListGlossaries_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_business_glossary_service_list_glossaries_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": 
"google.cloud.dataplex_v1.BusinessGlossaryServiceAsyncClient", + "shortName": "BusinessGlossaryServiceAsyncClient" + }, + "fullName": "google.cloud.dataplex_v1.BusinessGlossaryServiceAsyncClient.list_glossary_categories", + "method": { + "fullName": "google.cloud.dataplex.v1.BusinessGlossaryService.ListGlossaryCategories", + "service": { + "fullName": "google.cloud.dataplex.v1.BusinessGlossaryService", + "shortName": "BusinessGlossaryService" + }, + "shortName": "ListGlossaryCategories" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.ListGlossaryCategoriesRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.dataplex_v1.services.business_glossary_service.pagers.ListGlossaryCategoriesAsyncPager", + "shortName": "list_glossary_categories" + }, + "description": "Sample for ListGlossaryCategories", + "file": "dataplex_v1_generated_business_glossary_service_list_glossary_categories_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_BusinessGlossaryService_ListGlossaryCategories_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_business_glossary_service_list_glossary_categories_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dataplex_v1.BusinessGlossaryServiceClient", + "shortName": 
"BusinessGlossaryServiceClient" + }, + "fullName": "google.cloud.dataplex_v1.BusinessGlossaryServiceClient.list_glossary_categories", + "method": { + "fullName": "google.cloud.dataplex.v1.BusinessGlossaryService.ListGlossaryCategories", + "service": { + "fullName": "google.cloud.dataplex.v1.BusinessGlossaryService", + "shortName": "BusinessGlossaryService" + }, + "shortName": "ListGlossaryCategories" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.ListGlossaryCategoriesRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.dataplex_v1.services.business_glossary_service.pagers.ListGlossaryCategoriesPager", + "shortName": "list_glossary_categories" + }, + "description": "Sample for ListGlossaryCategories", + "file": "dataplex_v1_generated_business_glossary_service_list_glossary_categories_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_BusinessGlossaryService_ListGlossaryCategories_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_business_glossary_service_list_glossary_categories_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dataplex_v1.BusinessGlossaryServiceAsyncClient", + "shortName": "BusinessGlossaryServiceAsyncClient" + }, + "fullName": 
"google.cloud.dataplex_v1.BusinessGlossaryServiceAsyncClient.list_glossary_terms", + "method": { + "fullName": "google.cloud.dataplex.v1.BusinessGlossaryService.ListGlossaryTerms", + "service": { + "fullName": "google.cloud.dataplex.v1.BusinessGlossaryService", + "shortName": "BusinessGlossaryService" + }, + "shortName": "ListGlossaryTerms" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.ListGlossaryTermsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.dataplex_v1.services.business_glossary_service.pagers.ListGlossaryTermsAsyncPager", + "shortName": "list_glossary_terms" + }, + "description": "Sample for ListGlossaryTerms", + "file": "dataplex_v1_generated_business_glossary_service_list_glossary_terms_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_BusinessGlossaryService_ListGlossaryTerms_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_business_glossary_service_list_glossary_terms_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dataplex_v1.BusinessGlossaryServiceClient", + "shortName": "BusinessGlossaryServiceClient" + }, + "fullName": "google.cloud.dataplex_v1.BusinessGlossaryServiceClient.list_glossary_terms", + "method": { + "fullName": 
"google.cloud.dataplex.v1.BusinessGlossaryService.ListGlossaryTerms", + "service": { + "fullName": "google.cloud.dataplex.v1.BusinessGlossaryService", + "shortName": "BusinessGlossaryService" + }, + "shortName": "ListGlossaryTerms" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.ListGlossaryTermsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.dataplex_v1.services.business_glossary_service.pagers.ListGlossaryTermsPager", + "shortName": "list_glossary_terms" + }, + "description": "Sample for ListGlossaryTerms", + "file": "dataplex_v1_generated_business_glossary_service_list_glossary_terms_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_BusinessGlossaryService_ListGlossaryTerms_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_business_glossary_service_list_glossary_terms_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dataplex_v1.BusinessGlossaryServiceAsyncClient", + "shortName": "BusinessGlossaryServiceAsyncClient" + }, + "fullName": "google.cloud.dataplex_v1.BusinessGlossaryServiceAsyncClient.update_glossary_category", + "method": { + "fullName": "google.cloud.dataplex.v1.BusinessGlossaryService.UpdateGlossaryCategory", + "service": { + "fullName": 
"google.cloud.dataplex.v1.BusinessGlossaryService", + "shortName": "BusinessGlossaryService" + }, + "shortName": "UpdateGlossaryCategory" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.UpdateGlossaryCategoryRequest" + }, + { + "name": "category", + "type": "google.cloud.dataplex_v1.types.GlossaryCategory" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.dataplex_v1.types.GlossaryCategory", + "shortName": "update_glossary_category" + }, + "description": "Sample for UpdateGlossaryCategory", + "file": "dataplex_v1_generated_business_glossary_service_update_glossary_category_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_BusinessGlossaryService_UpdateGlossaryCategory_async", + "segments": [ + { + "end": 54, + "start": 27, + "type": "FULL" + }, + { + "end": 54, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 48, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 51, + "start": 49, + "type": "REQUEST_EXECUTION" + }, + { + "end": 55, + "start": 52, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_business_glossary_service_update_glossary_category_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dataplex_v1.BusinessGlossaryServiceClient", + "shortName": "BusinessGlossaryServiceClient" + }, + "fullName": "google.cloud.dataplex_v1.BusinessGlossaryServiceClient.update_glossary_category", + "method": { + "fullName": "google.cloud.dataplex.v1.BusinessGlossaryService.UpdateGlossaryCategory", + "service": { + "fullName": 
"google.cloud.dataplex.v1.BusinessGlossaryService", + "shortName": "BusinessGlossaryService" + }, + "shortName": "UpdateGlossaryCategory" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.UpdateGlossaryCategoryRequest" + }, + { + "name": "category", + "type": "google.cloud.dataplex_v1.types.GlossaryCategory" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.dataplex_v1.types.GlossaryCategory", + "shortName": "update_glossary_category" + }, + "description": "Sample for UpdateGlossaryCategory", + "file": "dataplex_v1_generated_business_glossary_service_update_glossary_category_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_BusinessGlossaryService_UpdateGlossaryCategory_sync", + "segments": [ + { + "end": 54, + "start": 27, + "type": "FULL" + }, + { + "end": 54, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 48, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 51, + "start": 49, + "type": "REQUEST_EXECUTION" + }, + { + "end": 55, + "start": 52, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_business_glossary_service_update_glossary_category_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dataplex_v1.BusinessGlossaryServiceAsyncClient", + "shortName": "BusinessGlossaryServiceAsyncClient" + }, + "fullName": "google.cloud.dataplex_v1.BusinessGlossaryServiceAsyncClient.update_glossary_term", + "method": { + "fullName": "google.cloud.dataplex.v1.BusinessGlossaryService.UpdateGlossaryTerm", + "service": { + "fullName": 
"google.cloud.dataplex.v1.BusinessGlossaryService", + "shortName": "BusinessGlossaryService" + }, + "shortName": "UpdateGlossaryTerm" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.UpdateGlossaryTermRequest" + }, + { + "name": "term", + "type": "google.cloud.dataplex_v1.types.GlossaryTerm" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.dataplex_v1.types.GlossaryTerm", + "shortName": "update_glossary_term" + }, + "description": "Sample for UpdateGlossaryTerm", + "file": "dataplex_v1_generated_business_glossary_service_update_glossary_term_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_BusinessGlossaryService_UpdateGlossaryTerm_async", + "segments": [ + { + "end": 54, + "start": 27, + "type": "FULL" + }, + { + "end": 54, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 48, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 51, + "start": 49, + "type": "REQUEST_EXECUTION" + }, + { + "end": 55, + "start": 52, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_business_glossary_service_update_glossary_term_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dataplex_v1.BusinessGlossaryServiceClient", + "shortName": "BusinessGlossaryServiceClient" + }, + "fullName": "google.cloud.dataplex_v1.BusinessGlossaryServiceClient.update_glossary_term", + "method": { + "fullName": "google.cloud.dataplex.v1.BusinessGlossaryService.UpdateGlossaryTerm", + "service": { + "fullName": "google.cloud.dataplex.v1.BusinessGlossaryService", + "shortName": 
"BusinessGlossaryService" + }, + "shortName": "UpdateGlossaryTerm" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.UpdateGlossaryTermRequest" + }, + { + "name": "term", + "type": "google.cloud.dataplex_v1.types.GlossaryTerm" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.dataplex_v1.types.GlossaryTerm", + "shortName": "update_glossary_term" + }, + "description": "Sample for UpdateGlossaryTerm", + "file": "dataplex_v1_generated_business_glossary_service_update_glossary_term_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_BusinessGlossaryService_UpdateGlossaryTerm_sync", + "segments": [ + { + "end": 54, + "start": 27, + "type": "FULL" + }, + { + "end": 54, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 48, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 51, + "start": 49, + "type": "REQUEST_EXECUTION" + }, + { + "end": 55, + "start": 52, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_business_glossary_service_update_glossary_term_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dataplex_v1.BusinessGlossaryServiceAsyncClient", + "shortName": "BusinessGlossaryServiceAsyncClient" + }, + "fullName": "google.cloud.dataplex_v1.BusinessGlossaryServiceAsyncClient.update_glossary", + "method": { + "fullName": "google.cloud.dataplex.v1.BusinessGlossaryService.UpdateGlossary", + "service": { + "fullName": "google.cloud.dataplex.v1.BusinessGlossaryService", + "shortName": "BusinessGlossaryService" + }, + "shortName": 
"UpdateGlossary" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.UpdateGlossaryRequest" + }, + { + "name": "glossary", + "type": "google.cloud.dataplex_v1.types.Glossary" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "update_glossary" + }, + "description": "Sample for UpdateGlossary", + "file": "dataplex_v1_generated_business_glossary_service_update_glossary_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_BusinessGlossaryService_UpdateGlossary_async", + "segments": [ + { + "end": 54, + "start": 27, + "type": "FULL" + }, + { + "end": 54, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 51, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 55, + "start": 52, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_business_glossary_service_update_glossary_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dataplex_v1.BusinessGlossaryServiceClient", + "shortName": "BusinessGlossaryServiceClient" + }, + "fullName": "google.cloud.dataplex_v1.BusinessGlossaryServiceClient.update_glossary", + "method": { + "fullName": "google.cloud.dataplex.v1.BusinessGlossaryService.UpdateGlossary", + "service": { + "fullName": "google.cloud.dataplex.v1.BusinessGlossaryService", + "shortName": "BusinessGlossaryService" + }, + "shortName": "UpdateGlossary" + }, + "parameters": [ + { + "name": "request", + "type": 
"google.cloud.dataplex_v1.types.UpdateGlossaryRequest" + }, + { + "name": "glossary", + "type": "google.cloud.dataplex_v1.types.Glossary" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "update_glossary" + }, + "description": "Sample for UpdateGlossary", + "file": "dataplex_v1_generated_business_glossary_service_update_glossary_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_BusinessGlossaryService_UpdateGlossary_sync", + "segments": [ + { + "end": 54, + "start": 27, + "type": "FULL" + }, + { + "end": 54, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 51, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 55, + "start": 52, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_business_glossary_service_update_glossary_sync.py" + }, { "canonical": true, "clientMethod": { @@ -269,23 +2744,200 @@ "fullName": "google.cloud.dataplex.v1.CatalogService", "shortName": "CatalogService" }, - "shortName": "CreateAspectType" + "shortName": "CreateAspectType" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.CreateAspectTypeRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "aspect_type", + "type": "google.cloud.dataplex_v1.types.AspectType" + }, + { + "name": "aspect_type_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, 
bytes]]]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "create_aspect_type" + }, + "description": "Sample for CreateAspectType", + "file": "dataplex_v1_generated_catalog_service_create_aspect_type_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_CatalogService_CreateAspectType_sync", + "segments": [ + { + "end": 61, + "start": 27, + "type": "FULL" + }, + { + "end": 61, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 51, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 58, + "start": 52, + "type": "REQUEST_EXECUTION" + }, + { + "end": 62, + "start": 59, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_catalog_service_create_aspect_type_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dataplex_v1.CatalogServiceAsyncClient", + "shortName": "CatalogServiceAsyncClient" + }, + "fullName": "google.cloud.dataplex_v1.CatalogServiceAsyncClient.create_entry_group", + "method": { + "fullName": "google.cloud.dataplex.v1.CatalogService.CreateEntryGroup", + "service": { + "fullName": "google.cloud.dataplex.v1.CatalogService", + "shortName": "CatalogService" + }, + "shortName": "CreateEntryGroup" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.CreateEntryGroupRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "entry_group", + "type": "google.cloud.dataplex_v1.types.EntryGroup" + }, + { + "name": "entry_group_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "create_entry_group" + }, + 
"description": "Sample for CreateEntryGroup", + "file": "dataplex_v1_generated_catalog_service_create_entry_group_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_CatalogService_CreateEntryGroup_async", + "segments": [ + { + "end": 56, + "start": 27, + "type": "FULL" + }, + { + "end": 56, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 53, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 57, + "start": 54, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_catalog_service_create_entry_group_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dataplex_v1.CatalogServiceClient", + "shortName": "CatalogServiceClient" + }, + "fullName": "google.cloud.dataplex_v1.CatalogServiceClient.create_entry_group", + "method": { + "fullName": "google.cloud.dataplex.v1.CatalogService.CreateEntryGroup", + "service": { + "fullName": "google.cloud.dataplex.v1.CatalogService", + "shortName": "CatalogService" + }, + "shortName": "CreateEntryGroup" }, "parameters": [ { "name": "request", - "type": "google.cloud.dataplex_v1.types.CreateAspectTypeRequest" + "type": "google.cloud.dataplex_v1.types.CreateEntryGroupRequest" }, { "name": "parent", "type": "str" }, { - "name": "aspect_type", - "type": "google.cloud.dataplex_v1.types.AspectType" + "name": "entry_group", + "type": "google.cloud.dataplex_v1.types.EntryGroup" }, { - "name": "aspect_type_id", + "name": "entry_group_id", "type": "str" }, { @@ -302,21 +2954,21 @@ } ], "resultType": "google.api_core.operation.Operation", - "shortName": "create_aspect_type" + "shortName": "create_entry_group" }, - "description": "Sample for CreateAspectType", - "file": "dataplex_v1_generated_catalog_service_create_aspect_type_sync.py", + "description": "Sample for 
CreateEntryGroup", + "file": "dataplex_v1_generated_catalog_service_create_entry_group_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_CatalogService_CreateAspectType_sync", + "regionTag": "dataplex_v1_generated_CatalogService_CreateEntryGroup_sync", "segments": [ { - "end": 61, + "end": 56, "start": 27, "type": "FULL" }, { - "end": 61, + "end": 56, "start": 27, "type": "SHORT" }, @@ -326,22 +2978,22 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 51, + "end": 46, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 58, - "start": 52, + "end": 53, + "start": 47, "type": "REQUEST_EXECUTION" }, { - "end": 62, - "start": 59, + "end": 57, + "start": 54, "type": "RESPONSE_HANDLING" } ], - "title": "dataplex_v1_generated_catalog_service_create_aspect_type_sync.py" + "title": "dataplex_v1_generated_catalog_service_create_entry_group_sync.py" }, { "canonical": true, @@ -351,30 +3003,30 @@ "fullName": "google.cloud.dataplex_v1.CatalogServiceAsyncClient", "shortName": "CatalogServiceAsyncClient" }, - "fullName": "google.cloud.dataplex_v1.CatalogServiceAsyncClient.create_entry_group", + "fullName": "google.cloud.dataplex_v1.CatalogServiceAsyncClient.create_entry_link", "method": { - "fullName": "google.cloud.dataplex.v1.CatalogService.CreateEntryGroup", + "fullName": "google.cloud.dataplex.v1.CatalogService.CreateEntryLink", "service": { "fullName": "google.cloud.dataplex.v1.CatalogService", "shortName": "CatalogService" }, - "shortName": "CreateEntryGroup" + "shortName": "CreateEntryLink" }, "parameters": [ { "name": "request", - "type": "google.cloud.dataplex_v1.types.CreateEntryGroupRequest" + "type": "google.cloud.dataplex_v1.types.CreateEntryLinkRequest" }, { "name": "parent", "type": "str" }, { - "name": "entry_group", - "type": "google.cloud.dataplex_v1.types.EntryGroup" + "name": "entry_link", + "type": "google.cloud.dataplex_v1.types.EntryLink" }, { - "name": "entry_group_id", + "name": "entry_link_id", 
"type": "str" }, { @@ -390,22 +3042,22 @@ "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "resultType": "google.api_core.operation_async.AsyncOperation", - "shortName": "create_entry_group" + "resultType": "google.cloud.dataplex_v1.types.EntryLink", + "shortName": "create_entry_link" }, - "description": "Sample for CreateEntryGroup", - "file": "dataplex_v1_generated_catalog_service_create_entry_group_async.py", + "description": "Sample for CreateEntryLink", + "file": "dataplex_v1_generated_catalog_service_create_entry_link_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_CatalogService_CreateEntryGroup_async", + "regionTag": "dataplex_v1_generated_CatalogService_CreateEntryLink_async", "segments": [ { - "end": 56, + "end": 58, "start": 27, "type": "FULL" }, { - "end": 56, + "end": 58, "start": 27, "type": "SHORT" }, @@ -415,22 +3067,22 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 46, + "end": 52, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 53, - "start": 47, + "end": 55, + "start": 53, "type": "REQUEST_EXECUTION" }, { - "end": 57, - "start": 54, + "end": 59, + "start": 56, "type": "RESPONSE_HANDLING" } ], - "title": "dataplex_v1_generated_catalog_service_create_entry_group_async.py" + "title": "dataplex_v1_generated_catalog_service_create_entry_link_async.py" }, { "canonical": true, @@ -439,30 +3091,30 @@ "fullName": "google.cloud.dataplex_v1.CatalogServiceClient", "shortName": "CatalogServiceClient" }, - "fullName": "google.cloud.dataplex_v1.CatalogServiceClient.create_entry_group", + "fullName": "google.cloud.dataplex_v1.CatalogServiceClient.create_entry_link", "method": { - "fullName": "google.cloud.dataplex.v1.CatalogService.CreateEntryGroup", + "fullName": "google.cloud.dataplex.v1.CatalogService.CreateEntryLink", "service": { "fullName": "google.cloud.dataplex.v1.CatalogService", "shortName": "CatalogService" }, - "shortName": "CreateEntryGroup" + "shortName": "CreateEntryLink" }, 
"parameters": [ { "name": "request", - "type": "google.cloud.dataplex_v1.types.CreateEntryGroupRequest" + "type": "google.cloud.dataplex_v1.types.CreateEntryLinkRequest" }, { "name": "parent", "type": "str" }, { - "name": "entry_group", - "type": "google.cloud.dataplex_v1.types.EntryGroup" + "name": "entry_link", + "type": "google.cloud.dataplex_v1.types.EntryLink" }, { - "name": "entry_group_id", + "name": "entry_link_id", "type": "str" }, { @@ -478,22 +3130,22 @@ "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "resultType": "google.api_core.operation.Operation", - "shortName": "create_entry_group" + "resultType": "google.cloud.dataplex_v1.types.EntryLink", + "shortName": "create_entry_link" }, - "description": "Sample for CreateEntryGroup", - "file": "dataplex_v1_generated_catalog_service_create_entry_group_sync.py", + "description": "Sample for CreateEntryLink", + "file": "dataplex_v1_generated_catalog_service_create_entry_link_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_CatalogService_CreateEntryGroup_sync", + "regionTag": "dataplex_v1_generated_CatalogService_CreateEntryLink_sync", "segments": [ { - "end": 56, + "end": 58, "start": 27, "type": "FULL" }, { - "end": 56, + "end": 58, "start": 27, "type": "SHORT" }, @@ -503,22 +3155,22 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 46, + "end": 52, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 53, - "start": 47, + "end": 55, + "start": 53, "type": "REQUEST_EXECUTION" }, { - "end": 57, - "start": 54, + "end": 59, + "start": 56, "type": "RESPONSE_HANDLING" } ], - "title": "dataplex_v1_generated_catalog_service_create_entry_group_sync.py" + "title": "dataplex_v1_generated_catalog_service_create_entry_link_sync.py" }, { "canonical": true, @@ -977,23 +3629,184 @@ "fullName": "google.cloud.dataplex.v1.CatalogService", "shortName": "CatalogService" }, - "shortName": "CreateMetadataJob" + "shortName": "CreateMetadataJob" + }, + "parameters": [ 
+ { + "name": "request", + "type": "google.cloud.dataplex_v1.types.CreateMetadataJobRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "metadata_job", + "type": "google.cloud.dataplex_v1.types.MetadataJob" + }, + { + "name": "metadata_job_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "create_metadata_job" + }, + "description": "Sample for CreateMetadataJob", + "file": "dataplex_v1_generated_catalog_service_create_metadata_job_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_CatalogService_CreateMetadataJob_sync", + "segments": [ + { + "end": 63, + "start": 27, + "type": "FULL" + }, + { + "end": 63, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 53, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 60, + "start": 54, + "type": "REQUEST_EXECUTION" + }, + { + "end": 64, + "start": 61, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_catalog_service_create_metadata_job_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dataplex_v1.CatalogServiceAsyncClient", + "shortName": "CatalogServiceAsyncClient" + }, + "fullName": "google.cloud.dataplex_v1.CatalogServiceAsyncClient.delete_aspect_type", + "method": { + "fullName": "google.cloud.dataplex.v1.CatalogService.DeleteAspectType", + "service": { + "fullName": "google.cloud.dataplex.v1.CatalogService", + "shortName": "CatalogService" + }, + "shortName": "DeleteAspectType" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.DeleteAspectTypeRequest" + }, + { + "name": "name", + "type": 
"str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "delete_aspect_type" + }, + "description": "Sample for DeleteAspectType", + "file": "dataplex_v1_generated_catalog_service_delete_aspect_type_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_CatalogService_DeleteAspectType_async", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_catalog_service_delete_aspect_type_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dataplex_v1.CatalogServiceClient", + "shortName": "CatalogServiceClient" + }, + "fullName": "google.cloud.dataplex_v1.CatalogServiceClient.delete_aspect_type", + "method": { + "fullName": "google.cloud.dataplex.v1.CatalogService.DeleteAspectType", + "service": { + "fullName": "google.cloud.dataplex.v1.CatalogService", + "shortName": "CatalogService" + }, + "shortName": "DeleteAspectType" }, "parameters": [ { "name": "request", - "type": "google.cloud.dataplex_v1.types.CreateMetadataJobRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "metadata_job", - "type": "google.cloud.dataplex_v1.types.MetadataJob" + "type": "google.cloud.dataplex_v1.types.DeleteAspectTypeRequest" }, { - "name": "metadata_job_id", + "name": "name", "type": "str" }, { @@ -1010,21 +3823,21 @@ } ], "resultType": 
"google.api_core.operation.Operation", - "shortName": "create_metadata_job" + "shortName": "delete_aspect_type" }, - "description": "Sample for CreateMetadataJob", - "file": "dataplex_v1_generated_catalog_service_create_metadata_job_sync.py", + "description": "Sample for DeleteAspectType", + "file": "dataplex_v1_generated_catalog_service_delete_aspect_type_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_CatalogService_CreateMetadataJob_sync", + "regionTag": "dataplex_v1_generated_CatalogService_DeleteAspectType_sync", "segments": [ { - "end": 63, + "end": 55, "start": 27, "type": "FULL" }, { - "end": 63, + "end": 55, "start": 27, "type": "SHORT" }, @@ -1034,22 +3847,22 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 53, + "end": 45, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 60, - "start": 54, + "end": 52, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 64, - "start": 61, + "end": 56, + "start": 53, "type": "RESPONSE_HANDLING" } ], - "title": "dataplex_v1_generated_catalog_service_create_metadata_job_sync.py" + "title": "dataplex_v1_generated_catalog_service_delete_aspect_type_sync.py" }, { "canonical": true, @@ -1059,19 +3872,19 @@ "fullName": "google.cloud.dataplex_v1.CatalogServiceAsyncClient", "shortName": "CatalogServiceAsyncClient" }, - "fullName": "google.cloud.dataplex_v1.CatalogServiceAsyncClient.delete_aspect_type", + "fullName": "google.cloud.dataplex_v1.CatalogServiceAsyncClient.delete_entry_group", "method": { - "fullName": "google.cloud.dataplex.v1.CatalogService.DeleteAspectType", + "fullName": "google.cloud.dataplex.v1.CatalogService.DeleteEntryGroup", "service": { "fullName": "google.cloud.dataplex.v1.CatalogService", "shortName": "CatalogService" }, - "shortName": "DeleteAspectType" + "shortName": "DeleteEntryGroup" }, "parameters": [ { "name": "request", - "type": "google.cloud.dataplex_v1.types.DeleteAspectTypeRequest" + "type": 
"google.cloud.dataplex_v1.types.DeleteEntryGroupRequest" }, { "name": "name", @@ -1091,13 +3904,13 @@ } ], "resultType": "google.api_core.operation_async.AsyncOperation", - "shortName": "delete_aspect_type" + "shortName": "delete_entry_group" }, - "description": "Sample for DeleteAspectType", - "file": "dataplex_v1_generated_catalog_service_delete_aspect_type_async.py", + "description": "Sample for DeleteEntryGroup", + "file": "dataplex_v1_generated_catalog_service_delete_entry_group_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_CatalogService_DeleteAspectType_async", + "regionTag": "dataplex_v1_generated_CatalogService_DeleteEntryGroup_async", "segments": [ { "end": 55, @@ -1130,7 +3943,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "dataplex_v1_generated_catalog_service_delete_aspect_type_async.py" + "title": "dataplex_v1_generated_catalog_service_delete_entry_group_async.py" }, { "canonical": true, @@ -1139,19 +3952,19 @@ "fullName": "google.cloud.dataplex_v1.CatalogServiceClient", "shortName": "CatalogServiceClient" }, - "fullName": "google.cloud.dataplex_v1.CatalogServiceClient.delete_aspect_type", + "fullName": "google.cloud.dataplex_v1.CatalogServiceClient.delete_entry_group", "method": { - "fullName": "google.cloud.dataplex.v1.CatalogService.DeleteAspectType", + "fullName": "google.cloud.dataplex.v1.CatalogService.DeleteEntryGroup", "service": { "fullName": "google.cloud.dataplex.v1.CatalogService", "shortName": "CatalogService" }, - "shortName": "DeleteAspectType" + "shortName": "DeleteEntryGroup" }, "parameters": [ { "name": "request", - "type": "google.cloud.dataplex_v1.types.DeleteAspectTypeRequest" + "type": "google.cloud.dataplex_v1.types.DeleteEntryGroupRequest" }, { "name": "name", @@ -1171,13 +3984,13 @@ } ], "resultType": "google.api_core.operation.Operation", - "shortName": "delete_aspect_type" + "shortName": "delete_entry_group" }, - "description": "Sample for DeleteAspectType", - "file": 
"dataplex_v1_generated_catalog_service_delete_aspect_type_sync.py", + "description": "Sample for DeleteEntryGroup", + "file": "dataplex_v1_generated_catalog_service_delete_entry_group_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_CatalogService_DeleteAspectType_sync", + "regionTag": "dataplex_v1_generated_CatalogService_DeleteEntryGroup_sync", "segments": [ { "end": 55, @@ -1210,7 +4023,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "dataplex_v1_generated_catalog_service_delete_aspect_type_sync.py" + "title": "dataplex_v1_generated_catalog_service_delete_entry_group_sync.py" }, { "canonical": true, @@ -1220,19 +4033,19 @@ "fullName": "google.cloud.dataplex_v1.CatalogServiceAsyncClient", "shortName": "CatalogServiceAsyncClient" }, - "fullName": "google.cloud.dataplex_v1.CatalogServiceAsyncClient.delete_entry_group", + "fullName": "google.cloud.dataplex_v1.CatalogServiceAsyncClient.delete_entry_link", "method": { - "fullName": "google.cloud.dataplex.v1.CatalogService.DeleteEntryGroup", + "fullName": "google.cloud.dataplex.v1.CatalogService.DeleteEntryLink", "service": { "fullName": "google.cloud.dataplex.v1.CatalogService", "shortName": "CatalogService" }, - "shortName": "DeleteEntryGroup" + "shortName": "DeleteEntryLink" }, "parameters": [ { "name": "request", - "type": "google.cloud.dataplex_v1.types.DeleteEntryGroupRequest" + "type": "google.cloud.dataplex_v1.types.DeleteEntryLinkRequest" }, { "name": "name", @@ -1251,22 +4064,22 @@ "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "resultType": "google.api_core.operation_async.AsyncOperation", - "shortName": "delete_entry_group" + "resultType": "google.cloud.dataplex_v1.types.EntryLink", + "shortName": "delete_entry_link" }, - "description": "Sample for DeleteEntryGroup", - "file": "dataplex_v1_generated_catalog_service_delete_entry_group_async.py", + "description": "Sample for DeleteEntryLink", + "file": 
"dataplex_v1_generated_catalog_service_delete_entry_link_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_CatalogService_DeleteEntryGroup_async", + "regionTag": "dataplex_v1_generated_CatalogService_DeleteEntryLink_async", "segments": [ { - "end": 55, + "end": 51, "start": 27, "type": "FULL" }, { - "end": 55, + "end": 51, "start": 27, "type": "SHORT" }, @@ -1281,17 +4094,17 @@ "type": "REQUEST_INITIALIZATION" }, { - "end": 52, + "end": 48, "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 56, - "start": 53, + "end": 52, + "start": 49, "type": "RESPONSE_HANDLING" } ], - "title": "dataplex_v1_generated_catalog_service_delete_entry_group_async.py" + "title": "dataplex_v1_generated_catalog_service_delete_entry_link_async.py" }, { "canonical": true, @@ -1300,19 +4113,19 @@ "fullName": "google.cloud.dataplex_v1.CatalogServiceClient", "shortName": "CatalogServiceClient" }, - "fullName": "google.cloud.dataplex_v1.CatalogServiceClient.delete_entry_group", + "fullName": "google.cloud.dataplex_v1.CatalogServiceClient.delete_entry_link", "method": { - "fullName": "google.cloud.dataplex.v1.CatalogService.DeleteEntryGroup", + "fullName": "google.cloud.dataplex.v1.CatalogService.DeleteEntryLink", "service": { "fullName": "google.cloud.dataplex.v1.CatalogService", "shortName": "CatalogService" }, - "shortName": "DeleteEntryGroup" + "shortName": "DeleteEntryLink" }, "parameters": [ { "name": "request", - "type": "google.cloud.dataplex_v1.types.DeleteEntryGroupRequest" + "type": "google.cloud.dataplex_v1.types.DeleteEntryLinkRequest" }, { "name": "name", @@ -1331,22 +4144,22 @@ "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "resultType": "google.api_core.operation.Operation", - "shortName": "delete_entry_group" + "resultType": "google.cloud.dataplex_v1.types.EntryLink", + "shortName": "delete_entry_link" }, - "description": "Sample for DeleteEntryGroup", - "file": 
"dataplex_v1_generated_catalog_service_delete_entry_group_sync.py", + "description": "Sample for DeleteEntryLink", + "file": "dataplex_v1_generated_catalog_service_delete_entry_link_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "dataplex_v1_generated_CatalogService_DeleteEntryGroup_sync", + "regionTag": "dataplex_v1_generated_CatalogService_DeleteEntryLink_sync", "segments": [ { - "end": 55, + "end": 51, "start": 27, "type": "FULL" }, { - "end": 55, + "end": 51, "start": 27, "type": "SHORT" }, @@ -1361,17 +4174,17 @@ "type": "REQUEST_INITIALIZATION" }, { - "end": 52, + "end": 48, "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 56, - "start": 53, + "end": 52, + "start": 49, "type": "RESPONSE_HANDLING" } ], - "title": "dataplex_v1_generated_catalog_service_delete_entry_group_sync.py" + "title": "dataplex_v1_generated_catalog_service_delete_entry_link_sync.py" }, { "canonical": true, @@ -2017,6 +4830,167 @@ ], "title": "dataplex_v1_generated_catalog_service_get_entry_group_sync.py" }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.dataplex_v1.CatalogServiceAsyncClient", + "shortName": "CatalogServiceAsyncClient" + }, + "fullName": "google.cloud.dataplex_v1.CatalogServiceAsyncClient.get_entry_link", + "method": { + "fullName": "google.cloud.dataplex.v1.CatalogService.GetEntryLink", + "service": { + "fullName": "google.cloud.dataplex.v1.CatalogService", + "shortName": "CatalogService" + }, + "shortName": "GetEntryLink" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.GetEntryLinkRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.dataplex_v1.types.EntryLink", + "shortName": "get_entry_link" + }, + 
"description": "Sample for GetEntryLink", + "file": "dataplex_v1_generated_catalog_service_get_entry_link_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_CatalogService_GetEntryLink_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_catalog_service_get_entry_link_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.dataplex_v1.CatalogServiceClient", + "shortName": "CatalogServiceClient" + }, + "fullName": "google.cloud.dataplex_v1.CatalogServiceClient.get_entry_link", + "method": { + "fullName": "google.cloud.dataplex.v1.CatalogService.GetEntryLink", + "service": { + "fullName": "google.cloud.dataplex.v1.CatalogService", + "shortName": "CatalogService" + }, + "shortName": "GetEntryLink" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.dataplex_v1.types.GetEntryLinkRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.dataplex_v1.types.EntryLink", + "shortName": "get_entry_link" + }, + "description": "Sample for GetEntryLink", + "file": "dataplex_v1_generated_catalog_service_get_entry_link_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "dataplex_v1_generated_CatalogService_GetEntryLink_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 
27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "dataplex_v1_generated_catalog_service_get_entry_link_sync.py" + }, { "canonical": true, "clientMethod": { diff --git a/packages/google-cloud-dataplex/scripts/client-post-processing/doc-formatting.yaml b/packages/google-cloud-dataplex/scripts/client-post-processing/doc-formatting.yaml new file mode 120000 index 000000000000..6e0991666f97 --- /dev/null +++ b/packages/google-cloud-dataplex/scripts/client-post-processing/doc-formatting.yaml @@ -0,0 +1 @@ +../../../../scripts/client-post-processing/doc-formatting.yaml \ No newline at end of file diff --git a/packages/google-cloud-dataplex/scripts/fixup_dataplex_v1_keywords.py b/packages/google-cloud-dataplex/scripts/fixup_dataplex_v1_keywords.py index 59d5de09c6c0..e054db378114 100644 --- a/packages/google-cloud-dataplex/scripts/fixup_dataplex_v1_keywords.py +++ b/packages/google-cloud-dataplex/scripts/fixup_dataplex_v1_keywords.py @@ -52,8 +52,12 @@ class dataplexCallTransformer(cst.CSTTransformer): 'create_entity': ('parent', 'entity', 'validate_only', ), 'create_entry': ('parent', 'entry_id', 'entry', ), 'create_entry_group': ('parent', 'entry_group_id', 'entry_group', 'validate_only', ), + 'create_entry_link': ('parent', 'entry_link_id', 'entry_link', ), 'create_entry_type': ('parent', 'entry_type_id', 'entry_type', 'validate_only', ), 'create_environment': ('parent', 'environment_id', 'environment', 'validate_only', ), + 'create_glossary': ('parent', 'glossary_id', 'glossary', 'validate_only', ), + 'create_glossary_category': ('parent', 'category_id', 'category', ), + 'create_glossary_term': ('parent', 'term_id', 'term', ), 'create_lake': ('parent', 'lake_id', 'lake', 'validate_only', ), 
'create_metadata_job': ('parent', 'metadata_job', 'metadata_job_id', 'validate_only', ), 'create_partition': ('parent', 'partition', 'validate_only', ), @@ -70,8 +74,12 @@ class dataplexCallTransformer(cst.CSTTransformer): 'delete_entity': ('name', 'etag', ), 'delete_entry': ('name', ), 'delete_entry_group': ('name', 'etag', ), + 'delete_entry_link': ('name', ), 'delete_entry_type': ('name', 'etag', ), 'delete_environment': ('name', ), + 'delete_glossary': ('name', 'etag', ), + 'delete_glossary_category': ('name', ), + 'delete_glossary_term': ('name', ), 'delete_lake': ('name', ), 'delete_partition': ('name', 'etag', ), 'delete_task': ('name', ), @@ -89,8 +97,12 @@ class dataplexCallTransformer(cst.CSTTransformer): 'get_entity': ('name', 'view', ), 'get_entry': ('name', 'view', 'aspect_types', 'paths', ), 'get_entry_group': ('name', ), + 'get_entry_link': ('name', ), 'get_entry_type': ('name', ), 'get_environment': ('name', ), + 'get_glossary': ('name', ), + 'get_glossary_category': ('name', ), + 'get_glossary_term': ('name', ), 'get_iam_policy': ('resource', 'options', ), 'get_job': ('name', ), 'get_lake': ('name', ), @@ -113,6 +125,9 @@ class dataplexCallTransformer(cst.CSTTransformer): 'list_entry_groups': ('parent', 'page_size', 'page_token', 'filter', 'order_by', ), 'list_entry_types': ('parent', 'page_size', 'page_token', 'filter', 'order_by', ), 'list_environments': ('parent', 'page_size', 'page_token', 'filter', 'order_by', ), + 'list_glossaries': ('parent', 'page_size', 'page_token', 'filter', 'order_by', ), + 'list_glossary_categories': ('parent', 'page_size', 'page_token', 'filter', 'order_by', ), + 'list_glossary_terms': ('parent', 'page_size', 'page_token', 'filter', 'order_by', ), 'list_jobs': ('parent', 'page_size', 'page_token', ), 'list_lake_actions': ('parent', 'page_size', 'page_token', ), 'list_lakes': ('parent', 'page_size', 'page_token', 'filter', 'order_by', ), @@ -141,6 +156,9 @@ class dataplexCallTransformer(cst.CSTTransformer): 
'update_entry_group': ('entry_group', 'update_mask', 'validate_only', ), 'update_entry_type': ('entry_type', 'update_mask', 'validate_only', ), 'update_environment': ('update_mask', 'environment', 'validate_only', ), + 'update_glossary': ('glossary', 'update_mask', 'validate_only', ), + 'update_glossary_category': ('category', 'update_mask', ), + 'update_glossary_term': ('term', 'update_mask', ), 'update_lake': ('update_mask', 'lake', 'validate_only', ), 'update_task': ('update_mask', 'task', 'validate_only', ), 'update_zone': ('update_mask', 'zone', 'validate_only', ), diff --git a/packages/google-cloud-dataplex/tests/unit/gapic/dataplex_v1/test_business_glossary_service.py b/packages/google-cloud-dataplex/tests/unit/gapic/dataplex_v1/test_business_glossary_service.py new file mode 100644 index 000000000000..5482cc9626ac --- /dev/null +++ b/packages/google-cloud-dataplex/tests/unit/gapic/dataplex_v1/test_business_glossary_service.py @@ -0,0 +1,15954 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import os + +# try/except added for compatibility with python < 3.8 +try: + from unittest import mock + from unittest.mock import AsyncMock # pragma: NO COVER +except ImportError: # pragma: NO COVER + import mock + +from collections.abc import AsyncIterable, Iterable +import json +import math + +from google.api_core import api_core_version +from google.protobuf import json_format +import grpc +from grpc.experimental import aio +from proto.marshal.rules import wrappers +from proto.marshal.rules.dates import DurationRule, TimestampRule +import pytest +from requests import PreparedRequest, Request, Response +from requests.sessions import Session + +try: + from google.auth.aio import credentials as ga_credentials_async + + HAS_GOOGLE_AUTH_AIO = True +except ImportError: # pragma: NO COVER + HAS_GOOGLE_AUTH_AIO = False + +from google.api_core import ( + future, + gapic_v1, + grpc_helpers, + grpc_helpers_async, + operation, + operations_v1, + path_template, +) +from google.api_core import client_options +from google.api_core import exceptions as core_exceptions +from google.api_core import operation_async # type: ignore +from google.api_core import retry as retries +import google.auth +from google.auth import credentials as ga_credentials +from google.auth.exceptions import MutualTLSChannelError +from google.cloud.location import locations_pb2 +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import options_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.oauth2 import service_account +from google.protobuf import empty_pb2 # type: ignore +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore + +from google.cloud.dataplex_v1.services.business_glossary_service import ( + BusinessGlossaryServiceAsyncClient, + BusinessGlossaryServiceClient, + pagers, + transports, +) +from 
google.cloud.dataplex_v1.types import business_glossary, service + +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + + +async def mock_async_gen(data, chunk_size=1): + for i in range(0, len(data)): # pragma: NO COVER + chunk = data[i : i + chunk_size] + yield chunk.encode("utf-8") + + +def client_cert_source_callback(): + return b"cert bytes", b"key bytes" + + +# TODO: use async auth anon credentials by default once the minimum version of google-auth is upgraded. +# See related issue: https://github.com/googleapis/gapic-generator-python/issues/2107. +def async_anonymous_credentials(): + if HAS_GOOGLE_AUTH_AIO: + return ga_credentials_async.AnonymousCredentials() + return ga_credentials.AnonymousCredentials() + + +# If default endpoint is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint so the client can produce a different +# mtls endpoint for endpoint testing purposes. +def modify_default_endpoint(client): + return ( + "foo.googleapis.com" + if ("localhost" in client.DEFAULT_ENDPOINT) + else client.DEFAULT_ENDPOINT + ) + + +# If default endpoint template is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint template so the client can produce a different +# mtls endpoint for endpoint testing purposes. 
+def modify_default_endpoint_template(client): + return ( + "test.{UNIVERSE_DOMAIN}" + if ("localhost" in client._DEFAULT_ENDPOINT_TEMPLATE) + else client._DEFAULT_ENDPOINT_TEMPLATE + ) + + +def test__get_default_mtls_endpoint(): + api_endpoint = "example.googleapis.com" + api_mtls_endpoint = "example.mtls.googleapis.com" + sandbox_endpoint = "example.sandbox.googleapis.com" + sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" + non_googleapi = "api.example.com" + + assert BusinessGlossaryServiceClient._get_default_mtls_endpoint(None) is None + assert ( + BusinessGlossaryServiceClient._get_default_mtls_endpoint(api_endpoint) + == api_mtls_endpoint + ) + assert ( + BusinessGlossaryServiceClient._get_default_mtls_endpoint(api_mtls_endpoint) + == api_mtls_endpoint + ) + assert ( + BusinessGlossaryServiceClient._get_default_mtls_endpoint(sandbox_endpoint) + == sandbox_mtls_endpoint + ) + assert ( + BusinessGlossaryServiceClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) + == sandbox_mtls_endpoint + ) + assert ( + BusinessGlossaryServiceClient._get_default_mtls_endpoint(non_googleapi) + == non_googleapi + ) + + +def test__read_environment_variables(): + assert BusinessGlossaryServiceClient._read_environment_variables() == ( + False, + "auto", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + assert BusinessGlossaryServiceClient._read_environment_variables() == ( + True, + "auto", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + assert BusinessGlossaryServiceClient._read_environment_variables() == ( + False, + "auto", + None, + ) + + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): + with pytest.raises(ValueError) as excinfo: + BusinessGlossaryServiceClient._read_environment_variables() + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or 
`false`" + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + assert BusinessGlossaryServiceClient._read_environment_variables() == ( + False, + "never", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + assert BusinessGlossaryServiceClient._read_environment_variables() == ( + False, + "always", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}): + assert BusinessGlossaryServiceClient._read_environment_variables() == ( + False, + "auto", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + BusinessGlossaryServiceClient._read_environment_variables() + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + with mock.patch.dict(os.environ, {"GOOGLE_CLOUD_UNIVERSE_DOMAIN": "foo.com"}): + assert BusinessGlossaryServiceClient._read_environment_variables() == ( + False, + "auto", + "foo.com", + ) + + +def test__get_client_cert_source(): + mock_provided_cert_source = mock.Mock() + mock_default_cert_source = mock.Mock() + + assert BusinessGlossaryServiceClient._get_client_cert_source(None, False) is None + assert ( + BusinessGlossaryServiceClient._get_client_cert_source( + mock_provided_cert_source, False + ) + is None + ) + assert ( + BusinessGlossaryServiceClient._get_client_cert_source( + mock_provided_cert_source, True + ) + == mock_provided_cert_source + ) + + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", return_value=True + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=mock_default_cert_source, + ): + assert ( + BusinessGlossaryServiceClient._get_client_cert_source(None, True) + is mock_default_cert_source + ) + assert ( + BusinessGlossaryServiceClient._get_client_cert_source( + 
mock_provided_cert_source, "true" + ) + is mock_provided_cert_source + ) + + +@mock.patch.object( + BusinessGlossaryServiceClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(BusinessGlossaryServiceClient), +) +@mock.patch.object( + BusinessGlossaryServiceAsyncClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(BusinessGlossaryServiceAsyncClient), +) +def test__get_api_endpoint(): + api_override = "foo.com" + mock_client_cert_source = mock.Mock() + default_universe = BusinessGlossaryServiceClient._DEFAULT_UNIVERSE + default_endpoint = BusinessGlossaryServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=default_universe + ) + mock_universe = "bar.com" + mock_endpoint = BusinessGlossaryServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=mock_universe + ) + + assert ( + BusinessGlossaryServiceClient._get_api_endpoint( + api_override, mock_client_cert_source, default_universe, "always" + ) + == api_override + ) + assert ( + BusinessGlossaryServiceClient._get_api_endpoint( + None, mock_client_cert_source, default_universe, "auto" + ) + == BusinessGlossaryServiceClient.DEFAULT_MTLS_ENDPOINT + ) + assert ( + BusinessGlossaryServiceClient._get_api_endpoint( + None, None, default_universe, "auto" + ) + == default_endpoint + ) + assert ( + BusinessGlossaryServiceClient._get_api_endpoint( + None, None, default_universe, "always" + ) + == BusinessGlossaryServiceClient.DEFAULT_MTLS_ENDPOINT + ) + assert ( + BusinessGlossaryServiceClient._get_api_endpoint( + None, mock_client_cert_source, default_universe, "always" + ) + == BusinessGlossaryServiceClient.DEFAULT_MTLS_ENDPOINT + ) + assert ( + BusinessGlossaryServiceClient._get_api_endpoint( + None, None, mock_universe, "never" + ) + == mock_endpoint + ) + assert ( + BusinessGlossaryServiceClient._get_api_endpoint( + None, None, default_universe, "never" + ) + == default_endpoint + ) + + with pytest.raises(MutualTLSChannelError) as excinfo: + 
BusinessGlossaryServiceClient._get_api_endpoint( + None, mock_client_cert_source, mock_universe, "auto" + ) + assert ( + str(excinfo.value) + == "mTLS is not supported in any universe other than googleapis.com." + ) + + +def test__get_universe_domain(): + client_universe_domain = "foo.com" + universe_domain_env = "bar.com" + + assert ( + BusinessGlossaryServiceClient._get_universe_domain( + client_universe_domain, universe_domain_env + ) + == client_universe_domain + ) + assert ( + BusinessGlossaryServiceClient._get_universe_domain(None, universe_domain_env) + == universe_domain_env + ) + assert ( + BusinessGlossaryServiceClient._get_universe_domain(None, None) + == BusinessGlossaryServiceClient._DEFAULT_UNIVERSE + ) + + with pytest.raises(ValueError) as excinfo: + BusinessGlossaryServiceClient._get_universe_domain("", None) + assert str(excinfo.value) == "Universe Domain cannot be an empty string." + + +@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = BusinessGlossaryServiceClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = 
BusinessGlossaryServiceClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + +@pytest.mark.parametrize( + "client_class,transport_name", + [ + (BusinessGlossaryServiceClient, "grpc"), + (BusinessGlossaryServiceAsyncClient, "grpc_asyncio"), + (BusinessGlossaryServiceClient, "rest"), + ], +) +def test_business_glossary_service_client_from_service_account_info( + client_class, transport_name +): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object( + service_account.Credentials, "from_service_account_info" + ) as factory: + factory.return_value = creds + info = {"valid": True} + client = client_class.from_service_account_info(info, transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + "dataplex.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://dataplex.googleapis.com" + ) + + +@pytest.mark.parametrize( + "transport_class,transport_name", + [ + (transports.BusinessGlossaryServiceGrpcTransport, "grpc"), + (transports.BusinessGlossaryServiceGrpcAsyncIOTransport, "grpc_asyncio"), + (transports.BusinessGlossaryServiceRestTransport, "rest"), + ], +) +def test_business_glossary_service_client_service_account_always_use_jwt( + transport_class, transport_name +): + with mock.patch.object( + service_account.Credentials, "with_always_use_jwt_access", create=True + ) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=True) + use_jwt.assert_called_once_with(True) + + with mock.patch.object( + service_account.Credentials, "with_always_use_jwt_access", create=True + ) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = 
transport_class(credentials=creds, always_use_jwt_access=False) + use_jwt.assert_not_called() + + +@pytest.mark.parametrize( + "client_class,transport_name", + [ + (BusinessGlossaryServiceClient, "grpc"), + (BusinessGlossaryServiceAsyncClient, "grpc_asyncio"), + (BusinessGlossaryServiceClient, "rest"), + ], +) +def test_business_glossary_service_client_from_service_account_file( + client_class, transport_name +): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object( + service_account.Credentials, "from_service_account_file" + ) as factory: + factory.return_value = creds + client = client_class.from_service_account_file( + "dummy/file/path.json", transport=transport_name + ) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + client = client_class.from_service_account_json( + "dummy/file/path.json", transport=transport_name + ) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + "dataplex.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://dataplex.googleapis.com" + ) + + +def test_business_glossary_service_client_get_transport_class(): + transport = BusinessGlossaryServiceClient.get_transport_class() + available_transports = [ + transports.BusinessGlossaryServiceGrpcTransport, + transports.BusinessGlossaryServiceRestTransport, + ] + assert transport in available_transports + + transport = BusinessGlossaryServiceClient.get_transport_class("grpc") + assert transport == transports.BusinessGlossaryServiceGrpcTransport + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + ( + BusinessGlossaryServiceClient, + transports.BusinessGlossaryServiceGrpcTransport, + "grpc", + ), + ( + BusinessGlossaryServiceAsyncClient, + transports.BusinessGlossaryServiceGrpcAsyncIOTransport, + "grpc_asyncio", + ), + ( + BusinessGlossaryServiceClient, + 
transports.BusinessGlossaryServiceRestTransport, + "rest", + ), + ], +) +@mock.patch.object( + BusinessGlossaryServiceClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(BusinessGlossaryServiceClient), +) +@mock.patch.object( + BusinessGlossaryServiceAsyncClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(BusinessGlossaryServiceAsyncClient), +) +def test_business_glossary_service_client_client_options( + client_class, transport_class, transport_name +): + # Check that if channel is provided we won't create a new one. + with mock.patch.object(BusinessGlossaryServiceClient, "get_transport_class") as gtc: + transport = transport_class(credentials=ga_credentials.AnonymousCredentials()) + client = client_class(transport=transport) + gtc.assert_not_called() + + # Check that if channel is provided via str we will create a new one. + with mock.patch.object(BusinessGlossaryServiceClient, "get_transport_class") as gtc: + client = client_class(transport=transport_name) + gtc.assert_called() + + # Check the case api_endpoint is provided. + options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name, client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "never". 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_MTLS_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + client = client_class(transport=transport_name) + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. 
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): + with pytest.raises(ValueError) as excinfo: + client = client_class(transport=transport_name) + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + + # Check the case quota_project_id is provided + options = client_options.ClientOptions(quota_project_id="octopus") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id="octopus", + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + # Check the case api_endpoint is provided + options = client_options.ClientOptions( + api_audience="https://language.googleapis.com" + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience="https://language.googleapis.com", + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,use_client_cert_env", + [ + ( + BusinessGlossaryServiceClient, + transports.BusinessGlossaryServiceGrpcTransport, + "grpc", + "true", + ), + ( + BusinessGlossaryServiceAsyncClient, + transports.BusinessGlossaryServiceGrpcAsyncIOTransport, + 
"grpc_asyncio", + "true", + ), + ( + BusinessGlossaryServiceClient, + transports.BusinessGlossaryServiceGrpcTransport, + "grpc", + "false", + ), + ( + BusinessGlossaryServiceAsyncClient, + transports.BusinessGlossaryServiceGrpcAsyncIOTransport, + "grpc_asyncio", + "false", + ), + ( + BusinessGlossaryServiceClient, + transports.BusinessGlossaryServiceRestTransport, + "rest", + "true", + ), + ( + BusinessGlossaryServiceClient, + transports.BusinessGlossaryServiceRestTransport, + "rest", + "false", + ), + ], +) +@mock.patch.object( + BusinessGlossaryServiceClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(BusinessGlossaryServiceClient), +) +@mock.patch.object( + BusinessGlossaryServiceAsyncClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(BusinessGlossaryServiceAsyncClient), +) +@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) +def test_business_glossary_service_client_mtls_env_auto( + client_class, transport_class, transport_name, use_client_cert_env +): + # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default + # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. + + # Check the case client_cert_source is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + options = client_options.ClientOptions( + client_cert_source=client_cert_source_callback + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + + if use_client_cert_env == "false": + expected_client_cert_source = None + expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ) + else: + expected_client_cert_source = client_cert_source_callback + expected_host = client.DEFAULT_MTLS_ENDPOINT + + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case ADC client cert is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=client_cert_source_callback, + ): + if use_client_cert_env == "false": + expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ) + expected_client_cert_source = None + else: + expected_host = client.DEFAULT_MTLS_ENDPOINT + expected_client_cert_source = client_cert_source_callback + + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case client_cert_source and ADC client cert are not provided. 
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class", [BusinessGlossaryServiceClient, BusinessGlossaryServiceAsyncClient] +) +@mock.patch.object( + BusinessGlossaryServiceClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(BusinessGlossaryServiceClient), +) +@mock.patch.object( + BusinessGlossaryServiceAsyncClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(BusinessGlossaryServiceAsyncClient), +) +def test_business_glossary_service_client_get_mtls_endpoint_and_cert_source( + client_class, +): + mock_client_cert_source = mock.Mock() + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source == mock_client_cert_source + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + mock_client_cert_source = mock.Mock() + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=mock_client_cert_source, + ): + ( + api_endpoint, + cert_source, + ) = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source == mock_client_cert_source + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + client_class.get_mtls_endpoint_and_cert_source() + + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. 
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): + with pytest.raises(ValueError) as excinfo: + client_class.get_mtls_endpoint_and_cert_source() + + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + + +@pytest.mark.parametrize( + "client_class", [BusinessGlossaryServiceClient, BusinessGlossaryServiceAsyncClient] +) +@mock.patch.object( + BusinessGlossaryServiceClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(BusinessGlossaryServiceClient), +) +@mock.patch.object( + BusinessGlossaryServiceAsyncClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(BusinessGlossaryServiceAsyncClient), +) +def test_business_glossary_service_client_client_api_endpoint(client_class): + mock_client_cert_source = client_cert_source_callback + api_override = "foo.com" + default_universe = BusinessGlossaryServiceClient._DEFAULT_UNIVERSE + default_endpoint = BusinessGlossaryServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=default_universe + ) + mock_universe = "bar.com" + mock_endpoint = BusinessGlossaryServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=mock_universe + ) + + # If ClientOptions.api_endpoint is set and GOOGLE_API_USE_CLIENT_CERTIFICATE="true", + # use ClientOptions.api_endpoint as the api endpoint regardless. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel" + ): + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=api_override + ) + client = client_class( + client_options=options, + credentials=ga_credentials.AnonymousCredentials(), + ) + assert client.api_endpoint == api_override + + # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="never", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + client = client_class(credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == default_endpoint + + # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="always", + # use the DEFAULT_MTLS_ENDPOINT as the api endpoint. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + client = client_class(credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + + # If ClientOptions.api_endpoint is not set, GOOGLE_API_USE_MTLS_ENDPOINT="auto" (default), + # GOOGLE_API_USE_CLIENT_CERTIFICATE="false" (default), default cert source doesn't exist, + # and ClientOptions.universe_domain="bar.com", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with universe domain as the api endpoint. 
+ options = client_options.ClientOptions() + universe_exists = hasattr(options, "universe_domain") + if universe_exists: + options = client_options.ClientOptions(universe_domain=mock_universe) + client = client_class( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + else: + client = client_class( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + assert client.api_endpoint == ( + mock_endpoint if universe_exists else default_endpoint + ) + assert client.universe_domain == ( + mock_universe if universe_exists else default_universe + ) + + # If ClientOptions does not have a universe domain attribute and GOOGLE_API_USE_MTLS_ENDPOINT="never", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. + options = client_options.ClientOptions() + if hasattr(options, "universe_domain"): + delattr(options, "universe_domain") + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + client = client_class( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + assert client.api_endpoint == default_endpoint + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + ( + BusinessGlossaryServiceClient, + transports.BusinessGlossaryServiceGrpcTransport, + "grpc", + ), + ( + BusinessGlossaryServiceAsyncClient, + transports.BusinessGlossaryServiceGrpcAsyncIOTransport, + "grpc_asyncio", + ), + ( + BusinessGlossaryServiceClient, + transports.BusinessGlossaryServiceRestTransport, + "rest", + ), + ], +) +def test_business_glossary_service_client_client_options_scopes( + client_class, transport_class, transport_name +): + # Check the case scopes are provided. 
+ options = client_options.ClientOptions( + scopes=["1", "2"], + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=["1", "2"], + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,grpc_helpers", + [ + ( + BusinessGlossaryServiceClient, + transports.BusinessGlossaryServiceGrpcTransport, + "grpc", + grpc_helpers, + ), + ( + BusinessGlossaryServiceAsyncClient, + transports.BusinessGlossaryServiceGrpcAsyncIOTransport, + "grpc_asyncio", + grpc_helpers_async, + ), + ( + BusinessGlossaryServiceClient, + transports.BusinessGlossaryServiceRestTransport, + "rest", + None, + ), + ], +) +def test_business_glossary_service_client_client_options_credentials_file( + client_class, transport_class, transport_name, grpc_helpers +): + # Check the case credentials file is provided. 
+ options = client_options.ClientOptions(credentials_file="credentials.json") + + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +def test_business_glossary_service_client_client_options_from_dict(): + with mock.patch( + "google.cloud.dataplex_v1.services.business_glossary_service.transports.BusinessGlossaryServiceGrpcTransport.__init__" + ) as grpc_transport: + grpc_transport.return_value = None + client = BusinessGlossaryServiceClient( + client_options={"api_endpoint": "squid.clam.whelk"} + ) + grpc_transport.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,grpc_helpers", + [ + ( + BusinessGlossaryServiceClient, + transports.BusinessGlossaryServiceGrpcTransport, + "grpc", + grpc_helpers, + ), + ( + BusinessGlossaryServiceAsyncClient, + transports.BusinessGlossaryServiceGrpcAsyncIOTransport, + "grpc_asyncio", + grpc_helpers_async, + ), + ], +) +def test_business_glossary_service_client_create_channel_credentials_file( + client_class, transport_class, transport_name, grpc_helpers +): + # Check the case credentials file is provided. 
+ options = client_options.ClientOptions(credentials_file="credentials.json") + + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # test that the credentials from file are saved and used as the credentials. + with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel" + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + file_creds = ga_credentials.AnonymousCredentials() + load_creds.return_value = (file_creds, None) + adc.return_value = (creds, None) + client = client_class(client_options=options, transport=transport_name) + create_channel.assert_called_with( + "dataplex.googleapis.com:443", + credentials=file_creds, + credentials_file=None, + quota_project_id=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + scopes=None, + default_host="dataplex.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize( + "request_type", + [ + business_glossary.CreateGlossaryRequest, + dict, + ], +) +def test_create_glossary(request_type, transport: str = "grpc"): + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we 
are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_glossary), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.create_glossary(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = business_glossary.CreateGlossaryRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_create_glossary_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = business_glossary.CreateGlossaryRequest( + parent="parent_value", + glossary_id="glossary_id_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_glossary), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client.create_glossary(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == business_glossary.CreateGlossaryRequest( + parent="parent_value", + glossary_id="glossary_id_value", + ) + + +def test_create_glossary_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.create_glossary in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.create_glossary] = mock_rpc + request = {} + client.create_glossary(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.create_glossary(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_create_glossary_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = BusinessGlossaryServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.create_glossary + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.create_glossary + ] = mock_rpc + + request = {} + await client.create_glossary(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.create_glossary(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_create_glossary_async( + transport: str = "grpc_asyncio", + request_type=business_glossary.CreateGlossaryRequest, +): + client = BusinessGlossaryServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_glossary), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.create_glossary(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = business_glossary.CreateGlossaryRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_create_glossary_async_from_dict(): + await test_create_glossary_async(request_type=dict) + + +def test_create_glossary_field_headers(): + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = business_glossary.CreateGlossaryRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.create_glossary), "__call__") as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.create_glossary(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_create_glossary_field_headers_async(): + client = BusinessGlossaryServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = business_glossary.CreateGlossaryRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_glossary), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.create_glossary(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_create_glossary_flattened(): + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_glossary), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = operations_pb2.Operation(name="operations/op") + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.create_glossary( + parent="parent_value", + glossary=business_glossary.Glossary(name="name_value"), + glossary_id="glossary_id_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].glossary + mock_val = business_glossary.Glossary(name="name_value") + assert arg == mock_val + arg = args[0].glossary_id + mock_val = "glossary_id_value" + assert arg == mock_val + + +def test_create_glossary_flattened_error(): + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.create_glossary( + business_glossary.CreateGlossaryRequest(), + parent="parent_value", + glossary=business_glossary.Glossary(name="name_value"), + glossary_id="glossary_id_value", + ) + + +@pytest.mark.asyncio +async def test_create_glossary_flattened_async(): + client = BusinessGlossaryServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_glossary), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. 
+ response = await client.create_glossary( + parent="parent_value", + glossary=business_glossary.Glossary(name="name_value"), + glossary_id="glossary_id_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].glossary + mock_val = business_glossary.Glossary(name="name_value") + assert arg == mock_val + arg = args[0].glossary_id + mock_val = "glossary_id_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_create_glossary_flattened_error_async(): + client = BusinessGlossaryServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.create_glossary( + business_glossary.CreateGlossaryRequest(), + parent="parent_value", + glossary=business_glossary.Glossary(name="name_value"), + glossary_id="glossary_id_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + business_glossary.UpdateGlossaryRequest, + dict, + ], +) +def test_update_glossary(request_type, transport: str = "grpc"): + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_glossary), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.update_glossary(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = business_glossary.UpdateGlossaryRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_update_glossary_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = business_glossary.UpdateGlossaryRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_glossary), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client.update_glossary(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == business_glossary.UpdateGlossaryRequest() + + +def test_update_glossary_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.update_glossary in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.update_glossary] = mock_rpc + request = {} + client.update_glossary(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.update_glossary(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_update_glossary_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = BusinessGlossaryServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.update_glossary + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.update_glossary + ] = mock_rpc + + request = {} + await client.update_glossary(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.update_glossary(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_update_glossary_async( + transport: str = "grpc_asyncio", + request_type=business_glossary.UpdateGlossaryRequest, +): + client = BusinessGlossaryServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_glossary), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.update_glossary(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = business_glossary.UpdateGlossaryRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_update_glossary_async_from_dict(): + await test_update_glossary_async(request_type=dict) + + +def test_update_glossary_field_headers(): + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = business_glossary.UpdateGlossaryRequest() + + request.glossary.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.update_glossary), "__call__") as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.update_glossary(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "glossary.name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_update_glossary_field_headers_async(): + client = BusinessGlossaryServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = business_glossary.UpdateGlossaryRequest() + + request.glossary.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_glossary), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.update_glossary(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "glossary.name=name_value", + ) in kw["metadata"] + + +def test_update_glossary_flattened(): + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_glossary), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = operations_pb2.Operation(name="operations/op") + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.update_glossary( + glossary=business_glossary.Glossary(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].glossary + mock_val = business_glossary.Glossary(name="name_value") + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + assert arg == mock_val + + +def test_update_glossary_flattened_error(): + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.update_glossary( + business_glossary.UpdateGlossaryRequest(), + glossary=business_glossary.Glossary(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +@pytest.mark.asyncio +async def test_update_glossary_flattened_async(): + client = BusinessGlossaryServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_glossary), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. 
+ response = await client.update_glossary( + glossary=business_glossary.Glossary(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].glossary + mock_val = business_glossary.Glossary(name="name_value") + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_update_glossary_flattened_error_async(): + client = BusinessGlossaryServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.update_glossary( + business_glossary.UpdateGlossaryRequest(), + glossary=business_glossary.Glossary(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +@pytest.mark.parametrize( + "request_type", + [ + business_glossary.DeleteGlossaryRequest, + dict, + ], +) +def test_delete_glossary(request_type, transport: str = "grpc"): + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_glossary), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.delete_glossary(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = business_glossary.DeleteGlossaryRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_delete_glossary_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = business_glossary.DeleteGlossaryRequest( + name="name_value", + etag="etag_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_glossary), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client.delete_glossary(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == business_glossary.DeleteGlossaryRequest( + name="name_value", + etag="etag_value", + ) + + +def test_delete_glossary_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.delete_glossary in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.delete_glossary] = mock_rpc + request = {} + client.delete_glossary(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.delete_glossary(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_delete_glossary_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = BusinessGlossaryServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.delete_glossary + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.delete_glossary + ] = mock_rpc + + request = {} + await client.delete_glossary(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.delete_glossary(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_delete_glossary_async( + transport: str = "grpc_asyncio", + request_type=business_glossary.DeleteGlossaryRequest, +): + client = BusinessGlossaryServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_glossary), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.delete_glossary(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = business_glossary.DeleteGlossaryRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_delete_glossary_async_from_dict(): + await test_delete_glossary_async(request_type=dict) + + +def test_delete_glossary_field_headers(): + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = business_glossary.DeleteGlossaryRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.delete_glossary), "__call__") as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.delete_glossary(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_delete_glossary_field_headers_async(): + client = BusinessGlossaryServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = business_glossary.DeleteGlossaryRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_glossary), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.delete_glossary(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_delete_glossary_flattened(): + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_glossary), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = operations_pb2.Operation(name="operations/op") + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.delete_glossary( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_delete_glossary_flattened_error(): + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_glossary( + business_glossary.DeleteGlossaryRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_delete_glossary_flattened_async(): + client = BusinessGlossaryServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_glossary), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.delete_glossary( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_delete_glossary_flattened_error_async(): + client = BusinessGlossaryServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.delete_glossary( + business_glossary.DeleteGlossaryRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + business_glossary.GetGlossaryRequest, + dict, + ], +) +def test_get_glossary(request_type, transport: str = "grpc"): + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_glossary), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = business_glossary.Glossary( + name="name_value", + uid="uid_value", + display_name="display_name_value", + description="description_value", + term_count=1088, + category_count=1510, + etag="etag_value", + ) + response = client.get_glossary(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = business_glossary.GetGlossaryRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, business_glossary.Glossary) + assert response.name == "name_value" + assert response.uid == "uid_value" + assert response.display_name == "display_name_value" + assert response.description == "description_value" + assert response.term_count == 1088 + assert response.category_count == 1510 + assert response.etag == "etag_value" + + +def test_get_glossary_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = business_glossary.GetGlossaryRequest( + name="name_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_glossary), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client.get_glossary(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == business_glossary.GetGlossaryRequest( + name="name_value", + ) + + +def test_get_glossary_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_glossary in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.get_glossary] = mock_rpc + request = {} + client.get_glossary(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.get_glossary(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_get_glossary_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = BusinessGlossaryServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.get_glossary + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.get_glossary + ] = mock_rpc + + request = {} + await client.get_glossary(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.get_glossary(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_get_glossary_async( + transport: str = "grpc_asyncio", request_type=business_glossary.GetGlossaryRequest +): + client = BusinessGlossaryServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.get_glossary), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + business_glossary.Glossary( + name="name_value", + uid="uid_value", + display_name="display_name_value", + description="description_value", + term_count=1088, + category_count=1510, + etag="etag_value", + ) + ) + response = await client.get_glossary(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = business_glossary.GetGlossaryRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, business_glossary.Glossary) + assert response.name == "name_value" + assert response.uid == "uid_value" + assert response.display_name == "display_name_value" + assert response.description == "description_value" + assert response.term_count == 1088 + assert response.category_count == 1510 + assert response.etag == "etag_value" + + +@pytest.mark.asyncio +async def test_get_glossary_async_from_dict(): + await test_get_glossary_async(request_type=dict) + + +def test_get_glossary_field_headers(): + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = business_glossary.GetGlossaryRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_glossary), "__call__") as call: + call.return_value = business_glossary.Glossary() + client.get_glossary(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_glossary_field_headers_async(): + client = BusinessGlossaryServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = business_glossary.GetGlossaryRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_glossary), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + business_glossary.Glossary() + ) + await client.get_glossary(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_get_glossary_flattened(): + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_glossary), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = business_glossary.Glossary() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_glossary( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_get_glossary_flattened_error(): + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_glossary( + business_glossary.GetGlossaryRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_get_glossary_flattened_async(): + client = BusinessGlossaryServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_glossary), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = business_glossary.Glossary() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + business_glossary.Glossary() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.get_glossary( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_get_glossary_flattened_error_async(): + client = BusinessGlossaryServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.get_glossary( + business_glossary.GetGlossaryRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + business_glossary.ListGlossariesRequest, + dict, + ], +) +def test_list_glossaries(request_type, transport: str = "grpc"): + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_glossaries), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = business_glossary.ListGlossariesResponse( + next_page_token="next_page_token_value", + unreachable_locations=["unreachable_locations_value"], + ) + response = client.list_glossaries(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = business_glossary.ListGlossariesRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListGlossariesPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable_locations == ["unreachable_locations_value"] + + +def test_list_glossaries_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. 
+ client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = business_glossary.ListGlossariesRequest( + parent="parent_value", + page_token="page_token_value", + filter="filter_value", + order_by="order_by_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_glossaries), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.list_glossaries(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == business_glossary.ListGlossariesRequest( + parent="parent_value", + page_token="page_token_value", + filter="filter_value", + order_by="order_by_value", + ) + + +def test_list_glossaries_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list_glossaries in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.list_glossaries] = mock_rpc + request = {} + client.list_glossaries(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.list_glossaries(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_list_glossaries_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = BusinessGlossaryServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.list_glossaries + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.list_glossaries + ] = mock_rpc + + request = {} + await client.list_glossaries(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.list_glossaries(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_list_glossaries_async( + transport: str = "grpc_asyncio", + request_type=business_glossary.ListGlossariesRequest, +): + client = BusinessGlossaryServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.list_glossaries), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + business_glossary.ListGlossariesResponse( + next_page_token="next_page_token_value", + unreachable_locations=["unreachable_locations_value"], + ) + ) + response = await client.list_glossaries(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = business_glossary.ListGlossariesRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListGlossariesAsyncPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable_locations == ["unreachable_locations_value"] + + +@pytest.mark.asyncio +async def test_list_glossaries_async_from_dict(): + await test_list_glossaries_async(request_type=dict) + + +def test_list_glossaries_field_headers(): + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = business_glossary.ListGlossariesRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_glossaries), "__call__") as call: + call.return_value = business_glossary.ListGlossariesResponse() + client.list_glossaries(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_glossaries_field_headers_async(): + client = BusinessGlossaryServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = business_glossary.ListGlossariesRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_glossaries), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + business_glossary.ListGlossariesResponse() + ) + await client.list_glossaries(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_list_glossaries_flattened(): + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_glossaries), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = business_glossary.ListGlossariesResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_glossaries( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +def test_list_glossaries_flattened_error(): + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_glossaries( + business_glossary.ListGlossariesRequest(), + parent="parent_value", + ) + + +@pytest.mark.asyncio +async def test_list_glossaries_flattened_async(): + client = BusinessGlossaryServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_glossaries), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = business_glossary.ListGlossariesResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + business_glossary.ListGlossariesResponse() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.list_glossaries( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_list_glossaries_flattened_error_async(): + client = BusinessGlossaryServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.list_glossaries( + business_glossary.ListGlossariesRequest(), + parent="parent_value", + ) + + +def test_list_glossaries_pager(transport_name: str = "grpc"): + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_glossaries), "__call__") as call: + # Set the response to a series of pages. + call.side_effect = ( + business_glossary.ListGlossariesResponse( + glossaries=[ + business_glossary.Glossary(), + business_glossary.Glossary(), + business_glossary.Glossary(), + ], + next_page_token="abc", + ), + business_glossary.ListGlossariesResponse( + glossaries=[], + next_page_token="def", + ), + business_glossary.ListGlossariesResponse( + glossaries=[ + business_glossary.Glossary(), + ], + next_page_token="ghi", + ), + business_glossary.ListGlossariesResponse( + glossaries=[ + business_glossary.Glossary(), + business_glossary.Glossary(), + ], + ), + RuntimeError, + ) + + expected_metadata = () + retry = retries.Retry() + timeout = 5 + expected_metadata = tuple(expected_metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), + ) + pager = client.list_glossaries(request={}, retry=retry, timeout=timeout) + + assert pager._metadata == expected_metadata + assert pager._retry == retry + assert pager._timeout == timeout + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, business_glossary.Glossary) for i in results) + + +def test_list_glossaries_pages(transport_name: str = "grpc"): + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.list_glossaries), "__call__") as call: + # Set the response to a series of pages. + call.side_effect = ( + business_glossary.ListGlossariesResponse( + glossaries=[ + business_glossary.Glossary(), + business_glossary.Glossary(), + business_glossary.Glossary(), + ], + next_page_token="abc", + ), + business_glossary.ListGlossariesResponse( + glossaries=[], + next_page_token="def", + ), + business_glossary.ListGlossariesResponse( + glossaries=[ + business_glossary.Glossary(), + ], + next_page_token="ghi", + ), + business_glossary.ListGlossariesResponse( + glossaries=[ + business_glossary.Glossary(), + business_glossary.Glossary(), + ], + ), + RuntimeError, + ) + pages = list(client.list_glossaries(request={}).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.asyncio +async def test_list_glossaries_async_pager(): + client = BusinessGlossaryServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_glossaries), "__call__", new_callable=mock.AsyncMock + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + business_glossary.ListGlossariesResponse( + glossaries=[ + business_glossary.Glossary(), + business_glossary.Glossary(), + business_glossary.Glossary(), + ], + next_page_token="abc", + ), + business_glossary.ListGlossariesResponse( + glossaries=[], + next_page_token="def", + ), + business_glossary.ListGlossariesResponse( + glossaries=[ + business_glossary.Glossary(), + ], + next_page_token="ghi", + ), + business_glossary.ListGlossariesResponse( + glossaries=[ + business_glossary.Glossary(), + business_glossary.Glossary(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_glossaries( + request={}, + ) + assert async_pager.next_page_token == "abc" + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, business_glossary.Glossary) for i in responses) + + +@pytest.mark.asyncio +async def test_list_glossaries_async_pages(): + client = BusinessGlossaryServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_glossaries), "__call__", new_callable=mock.AsyncMock + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + business_glossary.ListGlossariesResponse( + glossaries=[ + business_glossary.Glossary(), + business_glossary.Glossary(), + business_glossary.Glossary(), + ], + next_page_token="abc", + ), + business_glossary.ListGlossariesResponse( + glossaries=[], + next_page_token="def", + ), + business_glossary.ListGlossariesResponse( + glossaries=[ + business_glossary.Glossary(), + ], + next_page_token="ghi", + ), + business_glossary.ListGlossariesResponse( + glossaries=[ + business_glossary.Glossary(), + business_glossary.Glossary(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_glossaries(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + business_glossary.CreateGlossaryCategoryRequest, + dict, + ], +) +def test_create_glossary_category(request_type, transport: str = "grpc"): + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_glossary_category), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = business_glossary.GlossaryCategory( + name="name_value", + uid="uid_value", + display_name="display_name_value", + description="description_value", + parent="parent_value", + ) + response = client.create_glossary_category(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = business_glossary.CreateGlossaryCategoryRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, business_glossary.GlossaryCategory) + assert response.name == "name_value" + assert response.uid == "uid_value" + assert response.display_name == "display_name_value" + assert response.description == "description_value" + assert response.parent == "parent_value" + + +def test_create_glossary_category_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = business_glossary.CreateGlossaryCategoryRequest( + parent="parent_value", + category_id="category_id_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_glossary_category), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client.create_glossary_category(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == business_glossary.CreateGlossaryCategoryRequest( + parent="parent_value", + category_id="category_id_value", + ) + + +def test_create_glossary_category_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.create_glossary_category + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.create_glossary_category + ] = mock_rpc + request = {} + client.create_glossary_category(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.create_glossary_category(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_create_glossary_category_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = BusinessGlossaryServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.create_glossary_category + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.create_glossary_category + ] = mock_rpc + + request = {} + await client.create_glossary_category(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.create_glossary_category(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_create_glossary_category_async( + transport: str = "grpc_asyncio", + request_type=business_glossary.CreateGlossaryCategoryRequest, +): + client = BusinessGlossaryServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. 
+ request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_glossary_category), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + business_glossary.GlossaryCategory( + name="name_value", + uid="uid_value", + display_name="display_name_value", + description="description_value", + parent="parent_value", + ) + ) + response = await client.create_glossary_category(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = business_glossary.CreateGlossaryCategoryRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, business_glossary.GlossaryCategory) + assert response.name == "name_value" + assert response.uid == "uid_value" + assert response.display_name == "display_name_value" + assert response.description == "description_value" + assert response.parent == "parent_value" + + +@pytest.mark.asyncio +async def test_create_glossary_category_async_from_dict(): + await test_create_glossary_category_async(request_type=dict) + + +def test_create_glossary_category_field_headers(): + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = business_glossary.CreateGlossaryCategoryRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_glossary_category), "__call__" + ) as call: + call.return_value = business_glossary.GlossaryCategory() + client.create_glossary_category(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_create_glossary_category_field_headers_async(): + client = BusinessGlossaryServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = business_glossary.CreateGlossaryCategoryRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_glossary_category), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + business_glossary.GlossaryCategory() + ) + await client.create_glossary_category(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_create_glossary_category_flattened(): + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_glossary_category), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = business_glossary.GlossaryCategory() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. 
+ client.create_glossary_category( + parent="parent_value", + category=business_glossary.GlossaryCategory(name="name_value"), + category_id="category_id_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].category + mock_val = business_glossary.GlossaryCategory(name="name_value") + assert arg == mock_val + arg = args[0].category_id + mock_val = "category_id_value" + assert arg == mock_val + + +def test_create_glossary_category_flattened_error(): + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.create_glossary_category( + business_glossary.CreateGlossaryCategoryRequest(), + parent="parent_value", + category=business_glossary.GlossaryCategory(name="name_value"), + category_id="category_id_value", + ) + + +@pytest.mark.asyncio +async def test_create_glossary_category_flattened_async(): + client = BusinessGlossaryServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_glossary_category), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = business_glossary.GlossaryCategory() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + business_glossary.GlossaryCategory() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. 
+ response = await client.create_glossary_category( + parent="parent_value", + category=business_glossary.GlossaryCategory(name="name_value"), + category_id="category_id_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].category + mock_val = business_glossary.GlossaryCategory(name="name_value") + assert arg == mock_val + arg = args[0].category_id + mock_val = "category_id_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_create_glossary_category_flattened_error_async(): + client = BusinessGlossaryServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.create_glossary_category( + business_glossary.CreateGlossaryCategoryRequest(), + parent="parent_value", + category=business_glossary.GlossaryCategory(name="name_value"), + category_id="category_id_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + business_glossary.UpdateGlossaryCategoryRequest, + dict, + ], +) +def test_update_glossary_category(request_type, transport: str = "grpc"): + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_glossary_category), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = business_glossary.GlossaryCategory( + name="name_value", + uid="uid_value", + display_name="display_name_value", + description="description_value", + parent="parent_value", + ) + response = client.update_glossary_category(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = business_glossary.UpdateGlossaryCategoryRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, business_glossary.GlossaryCategory) + assert response.name == "name_value" + assert response.uid == "uid_value" + assert response.display_name == "display_name_value" + assert response.description == "description_value" + assert response.parent == "parent_value" + + +def test_update_glossary_category_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = business_glossary.UpdateGlossaryCategoryRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_glossary_category), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client.update_glossary_category(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == business_glossary.UpdateGlossaryCategoryRequest() + + +def test_update_glossary_category_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.update_glossary_category + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.update_glossary_category + ] = mock_rpc + request = {} + client.update_glossary_category(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.update_glossary_category(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_update_glossary_category_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = BusinessGlossaryServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.update_glossary_category + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.update_glossary_category + ] = mock_rpc + + request = {} + await client.update_glossary_category(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.update_glossary_category(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_update_glossary_category_async( + transport: str = "grpc_asyncio", + request_type=business_glossary.UpdateGlossaryCategoryRequest, +): + client = BusinessGlossaryServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. 
+ request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_glossary_category), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + business_glossary.GlossaryCategory( + name="name_value", + uid="uid_value", + display_name="display_name_value", + description="description_value", + parent="parent_value", + ) + ) + response = await client.update_glossary_category(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = business_glossary.UpdateGlossaryCategoryRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, business_glossary.GlossaryCategory) + assert response.name == "name_value" + assert response.uid == "uid_value" + assert response.display_name == "display_name_value" + assert response.description == "description_value" + assert response.parent == "parent_value" + + +@pytest.mark.asyncio +async def test_update_glossary_category_async_from_dict(): + await test_update_glossary_category_async(request_type=dict) + + +def test_update_glossary_category_field_headers(): + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = business_glossary.UpdateGlossaryCategoryRequest() + + request.category.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_glossary_category), "__call__" + ) as call: + call.return_value = business_glossary.GlossaryCategory() + client.update_glossary_category(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "category.name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_update_glossary_category_field_headers_async(): + client = BusinessGlossaryServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = business_glossary.UpdateGlossaryCategoryRequest() + + request.category.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_glossary_category), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + business_glossary.GlossaryCategory() + ) + await client.update_glossary_category(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "category.name=name_value", + ) in kw["metadata"] + + +def test_update_glossary_category_flattened(): + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_glossary_category), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = business_glossary.GlossaryCategory() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. 
+ client.update_glossary_category( + category=business_glossary.GlossaryCategory(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].category + mock_val = business_glossary.GlossaryCategory(name="name_value") + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + assert arg == mock_val + + +def test_update_glossary_category_flattened_error(): + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.update_glossary_category( + business_glossary.UpdateGlossaryCategoryRequest(), + category=business_glossary.GlossaryCategory(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +@pytest.mark.asyncio +async def test_update_glossary_category_flattened_async(): + client = BusinessGlossaryServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_glossary_category), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = business_glossary.GlossaryCategory() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + business_glossary.GlossaryCategory() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. 
+ response = await client.update_glossary_category( + category=business_glossary.GlossaryCategory(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].category + mock_val = business_glossary.GlossaryCategory(name="name_value") + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_update_glossary_category_flattened_error_async(): + client = BusinessGlossaryServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.update_glossary_category( + business_glossary.UpdateGlossaryCategoryRequest(), + category=business_glossary.GlossaryCategory(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +@pytest.mark.parametrize( + "request_type", + [ + business_glossary.DeleteGlossaryCategoryRequest, + dict, + ], +) +def test_delete_glossary_category(request_type, transport: str = "grpc"): + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_glossary_category), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = None + response = client.delete_glossary_category(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = business_glossary.DeleteGlossaryCategoryRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +def test_delete_glossary_category_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = business_glossary.DeleteGlossaryCategoryRequest( + name="name_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_glossary_category), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client.delete_glossary_category(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == business_glossary.DeleteGlossaryCategoryRequest( + name="name_value", + ) + + +def test_delete_glossary_category_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.delete_glossary_category + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.delete_glossary_category + ] = mock_rpc + request = {} + client.delete_glossary_category(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.delete_glossary_category(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_delete_glossary_category_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = BusinessGlossaryServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.delete_glossary_category + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.delete_glossary_category + ] = mock_rpc + + request = {} + await client.delete_glossary_category(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.delete_glossary_category(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_delete_glossary_category_async( + transport: str = "grpc_asyncio", + request_type=business_glossary.DeleteGlossaryCategoryRequest, +): + client = BusinessGlossaryServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. 
+ request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_glossary_category), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.delete_glossary_category(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = business_glossary.DeleteGlossaryCategoryRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +@pytest.mark.asyncio +async def test_delete_glossary_category_async_from_dict(): + await test_delete_glossary_category_async(request_type=dict) + + +def test_delete_glossary_category_field_headers(): + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = business_glossary.DeleteGlossaryCategoryRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_glossary_category), "__call__" + ) as call: + call.return_value = None + client.delete_glossary_category(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_delete_glossary_category_field_headers_async(): + client = BusinessGlossaryServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = business_glossary.DeleteGlossaryCategoryRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_glossary_category), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.delete_glossary_category(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_delete_glossary_category_flattened(): + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_glossary_category), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = None + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.delete_glossary_category( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_delete_glossary_category_flattened_error(): + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_glossary_category( + business_glossary.DeleteGlossaryCategoryRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_delete_glossary_category_flattened_async(): + client = BusinessGlossaryServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_glossary_category), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = None + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.delete_glossary_category( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_delete_glossary_category_flattened_error_async(): + client = BusinessGlossaryServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.delete_glossary_category( + business_glossary.DeleteGlossaryCategoryRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + business_glossary.GetGlossaryCategoryRequest, + dict, + ], +) +def test_get_glossary_category(request_type, transport: str = "grpc"): + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_glossary_category), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = business_glossary.GlossaryCategory( + name="name_value", + uid="uid_value", + display_name="display_name_value", + description="description_value", + parent="parent_value", + ) + response = client.get_glossary_category(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = business_glossary.GetGlossaryCategoryRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, business_glossary.GlossaryCategory) + assert response.name == "name_value" + assert response.uid == "uid_value" + assert response.display_name == "display_name_value" + assert response.description == "description_value" + assert response.parent == "parent_value" + + +def test_get_glossary_category_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. 
+ client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = business_glossary.GetGlossaryCategoryRequest( + name="name_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_glossary_category), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.get_glossary_category(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == business_glossary.GetGlossaryCategoryRequest( + name="name_value", + ) + + +def test_get_glossary_category_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.get_glossary_category + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.get_glossary_category + ] = mock_rpc + request = {} + client.get_glossary_category(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.get_glossary_category(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_get_glossary_category_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = BusinessGlossaryServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.get_glossary_category + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.get_glossary_category + ] = mock_rpc + + request = {} + await client.get_glossary_category(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.get_glossary_category(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_get_glossary_category_async( + transport: str = "grpc_asyncio", + request_type=business_glossary.GetGlossaryCategoryRequest, +): + client = BusinessGlossaryServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. 
+ request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_glossary_category), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + business_glossary.GlossaryCategory( + name="name_value", + uid="uid_value", + display_name="display_name_value", + description="description_value", + parent="parent_value", + ) + ) + response = await client.get_glossary_category(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = business_glossary.GetGlossaryCategoryRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, business_glossary.GlossaryCategory) + assert response.name == "name_value" + assert response.uid == "uid_value" + assert response.display_name == "display_name_value" + assert response.description == "description_value" + assert response.parent == "parent_value" + + +@pytest.mark.asyncio +async def test_get_glossary_category_async_from_dict(): + await test_get_glossary_category_async(request_type=dict) + + +def test_get_glossary_category_field_headers(): + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = business_glossary.GetGlossaryCategoryRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_glossary_category), "__call__" + ) as call: + call.return_value = business_glossary.GlossaryCategory() + client.get_glossary_category(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_glossary_category_field_headers_async(): + client = BusinessGlossaryServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = business_glossary.GetGlossaryCategoryRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_glossary_category), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + business_glossary.GlossaryCategory() + ) + await client.get_glossary_category(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_get_glossary_category_flattened(): + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_glossary_category), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = business_glossary.GlossaryCategory() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_glossary_category( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_get_glossary_category_flattened_error(): + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_glossary_category( + business_glossary.GetGlossaryCategoryRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_get_glossary_category_flattened_async(): + client = BusinessGlossaryServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_glossary_category), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = business_glossary.GlossaryCategory() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + business_glossary.GlossaryCategory() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.get_glossary_category( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_get_glossary_category_flattened_error_async(): + client = BusinessGlossaryServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.get_glossary_category( + business_glossary.GetGlossaryCategoryRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + business_glossary.ListGlossaryCategoriesRequest, + dict, + ], +) +def test_list_glossary_categories(request_type, transport: str = "grpc"): + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_glossary_categories), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = business_glossary.ListGlossaryCategoriesResponse( + next_page_token="next_page_token_value", + unreachable_locations=["unreachable_locations_value"], + ) + response = client.list_glossary_categories(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = business_glossary.ListGlossaryCategoriesRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListGlossaryCategoriesPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable_locations == ["unreachable_locations_value"] + + +def test_list_glossary_categories_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. 
+ client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = business_glossary.ListGlossaryCategoriesRequest( + parent="parent_value", + page_token="page_token_value", + filter="filter_value", + order_by="order_by_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_glossary_categories), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.list_glossary_categories(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == business_glossary.ListGlossaryCategoriesRequest( + parent="parent_value", + page_token="page_token_value", + filter="filter_value", + order_by="order_by_value", + ) + + +def test_list_glossary_categories_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.list_glossary_categories + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client._transport._wrapped_methods[ + client._transport.list_glossary_categories + ] = mock_rpc + request = {} + client.list_glossary_categories(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.list_glossary_categories(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_list_glossary_categories_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = BusinessGlossaryServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.list_glossary_categories + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.list_glossary_categories + ] = mock_rpc + + request = {} + await client.list_glossary_categories(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + await client.list_glossary_categories(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_list_glossary_categories_async( + transport: str = "grpc_asyncio", + request_type=business_glossary.ListGlossaryCategoriesRequest, +): + client = BusinessGlossaryServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_glossary_categories), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + business_glossary.ListGlossaryCategoriesResponse( + next_page_token="next_page_token_value", + unreachable_locations=["unreachable_locations_value"], + ) + ) + response = await client.list_glossary_categories(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = business_glossary.ListGlossaryCategoriesRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.ListGlossaryCategoriesAsyncPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable_locations == ["unreachable_locations_value"] + + +@pytest.mark.asyncio +async def test_list_glossary_categories_async_from_dict(): + await test_list_glossary_categories_async(request_type=dict) + + +def test_list_glossary_categories_field_headers(): + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = business_glossary.ListGlossaryCategoriesRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_glossary_categories), "__call__" + ) as call: + call.return_value = business_glossary.ListGlossaryCategoriesResponse() + client.list_glossary_categories(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_glossary_categories_field_headers_async(): + client = BusinessGlossaryServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = business_glossary.ListGlossaryCategoriesRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.list_glossary_categories), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + business_glossary.ListGlossaryCategoriesResponse() + ) + await client.list_glossary_categories(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_list_glossary_categories_flattened(): + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_glossary_categories), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = business_glossary.ListGlossaryCategoriesResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_glossary_categories( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +def test_list_glossary_categories_flattened_error(): + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.list_glossary_categories( + business_glossary.ListGlossaryCategoriesRequest(), + parent="parent_value", + ) + + +@pytest.mark.asyncio +async def test_list_glossary_categories_flattened_async(): + client = BusinessGlossaryServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_glossary_categories), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = business_glossary.ListGlossaryCategoriesResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + business_glossary.ListGlossaryCategoriesResponse() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.list_glossary_categories( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_list_glossary_categories_flattened_error_async(): + client = BusinessGlossaryServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.list_glossary_categories( + business_glossary.ListGlossaryCategoriesRequest(), + parent="parent_value", + ) + + +def test_list_glossary_categories_pager(transport_name: str = "grpc"): + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.list_glossary_categories), "__call__" + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + business_glossary.ListGlossaryCategoriesResponse( + categories=[ + business_glossary.GlossaryCategory(), + business_glossary.GlossaryCategory(), + business_glossary.GlossaryCategory(), + ], + next_page_token="abc", + ), + business_glossary.ListGlossaryCategoriesResponse( + categories=[], + next_page_token="def", + ), + business_glossary.ListGlossaryCategoriesResponse( + categories=[ + business_glossary.GlossaryCategory(), + ], + next_page_token="ghi", + ), + business_glossary.ListGlossaryCategoriesResponse( + categories=[ + business_glossary.GlossaryCategory(), + business_glossary.GlossaryCategory(), + ], + ), + RuntimeError, + ) + + expected_metadata = () + retry = retries.Retry() + timeout = 5 + expected_metadata = tuple(expected_metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), + ) + pager = client.list_glossary_categories( + request={}, retry=retry, timeout=timeout + ) + + assert pager._metadata == expected_metadata + assert pager._retry == retry + assert pager._timeout == timeout + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, business_glossary.GlossaryCategory) for i in results) + + +def test_list_glossary_categories_pages(transport_name: str = "grpc"): + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_glossary_categories), "__call__" + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + business_glossary.ListGlossaryCategoriesResponse( + categories=[ + business_glossary.GlossaryCategory(), + business_glossary.GlossaryCategory(), + business_glossary.GlossaryCategory(), + ], + next_page_token="abc", + ), + business_glossary.ListGlossaryCategoriesResponse( + categories=[], + next_page_token="def", + ), + business_glossary.ListGlossaryCategoriesResponse( + categories=[ + business_glossary.GlossaryCategory(), + ], + next_page_token="ghi", + ), + business_glossary.ListGlossaryCategoriesResponse( + categories=[ + business_glossary.GlossaryCategory(), + business_glossary.GlossaryCategory(), + ], + ), + RuntimeError, + ) + pages = list(client.list_glossary_categories(request={}).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.asyncio +async def test_list_glossary_categories_async_pager(): + client = BusinessGlossaryServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_glossary_categories), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + business_glossary.ListGlossaryCategoriesResponse( + categories=[ + business_glossary.GlossaryCategory(), + business_glossary.GlossaryCategory(), + business_glossary.GlossaryCategory(), + ], + next_page_token="abc", + ), + business_glossary.ListGlossaryCategoriesResponse( + categories=[], + next_page_token="def", + ), + business_glossary.ListGlossaryCategoriesResponse( + categories=[ + business_glossary.GlossaryCategory(), + ], + next_page_token="ghi", + ), + business_glossary.ListGlossaryCategoriesResponse( + categories=[ + business_glossary.GlossaryCategory(), + business_glossary.GlossaryCategory(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_glossary_categories( + request={}, + ) + assert async_pager.next_page_token == "abc" + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, business_glossary.GlossaryCategory) for i in responses) + + +@pytest.mark.asyncio +async def test_list_glossary_categories_async_pages(): + client = BusinessGlossaryServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_glossary_categories), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + business_glossary.ListGlossaryCategoriesResponse( + categories=[ + business_glossary.GlossaryCategory(), + business_glossary.GlossaryCategory(), + business_glossary.GlossaryCategory(), + ], + next_page_token="abc", + ), + business_glossary.ListGlossaryCategoriesResponse( + categories=[], + next_page_token="def", + ), + business_glossary.ListGlossaryCategoriesResponse( + categories=[ + business_glossary.GlossaryCategory(), + ], + next_page_token="ghi", + ), + business_glossary.ListGlossaryCategoriesResponse( + categories=[ + business_glossary.GlossaryCategory(), + business_glossary.GlossaryCategory(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_glossary_categories(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + business_glossary.CreateGlossaryTermRequest, + dict, + ], +) +def test_create_glossary_term(request_type, transport: str = "grpc"): + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_glossary_term), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = business_glossary.GlossaryTerm( + name="name_value", + uid="uid_value", + display_name="display_name_value", + description="description_value", + parent="parent_value", + ) + response = client.create_glossary_term(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = business_glossary.CreateGlossaryTermRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, business_glossary.GlossaryTerm) + assert response.name == "name_value" + assert response.uid == "uid_value" + assert response.display_name == "display_name_value" + assert response.description == "description_value" + assert response.parent == "parent_value" + + +def test_create_glossary_term_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = business_glossary.CreateGlossaryTermRequest( + parent="parent_value", + term_id="term_id_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_glossary_term), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client.create_glossary_term(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == business_glossary.CreateGlossaryTermRequest( + parent="parent_value", + term_id="term_id_value", + ) + + +def test_create_glossary_term_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.create_glossary_term in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.create_glossary_term + ] = mock_rpc + request = {} + client.create_glossary_term(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.create_glossary_term(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_create_glossary_term_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = BusinessGlossaryServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.create_glossary_term + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.create_glossary_term + ] = mock_rpc + + request = {} + await client.create_glossary_term(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.create_glossary_term(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_create_glossary_term_async( + transport: str = "grpc_asyncio", + request_type=business_glossary.CreateGlossaryTermRequest, +): + client = BusinessGlossaryServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. 
+ request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_glossary_term), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + business_glossary.GlossaryTerm( + name="name_value", + uid="uid_value", + display_name="display_name_value", + description="description_value", + parent="parent_value", + ) + ) + response = await client.create_glossary_term(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = business_glossary.CreateGlossaryTermRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, business_glossary.GlossaryTerm) + assert response.name == "name_value" + assert response.uid == "uid_value" + assert response.display_name == "display_name_value" + assert response.description == "description_value" + assert response.parent == "parent_value" + + +@pytest.mark.asyncio +async def test_create_glossary_term_async_from_dict(): + await test_create_glossary_term_async(request_type=dict) + + +def test_create_glossary_term_field_headers(): + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = business_glossary.CreateGlossaryTermRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_glossary_term), "__call__" + ) as call: + call.return_value = business_glossary.GlossaryTerm() + client.create_glossary_term(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_create_glossary_term_field_headers_async(): + client = BusinessGlossaryServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = business_glossary.CreateGlossaryTermRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_glossary_term), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + business_glossary.GlossaryTerm() + ) + await client.create_glossary_term(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_create_glossary_term_flattened(): + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_glossary_term), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = business_glossary.GlossaryTerm() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. 
+ client.create_glossary_term( + parent="parent_value", + term=business_glossary.GlossaryTerm(name="name_value"), + term_id="term_id_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].term + mock_val = business_glossary.GlossaryTerm(name="name_value") + assert arg == mock_val + arg = args[0].term_id + mock_val = "term_id_value" + assert arg == mock_val + + +def test_create_glossary_term_flattened_error(): + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.create_glossary_term( + business_glossary.CreateGlossaryTermRequest(), + parent="parent_value", + term=business_glossary.GlossaryTerm(name="name_value"), + term_id="term_id_value", + ) + + +@pytest.mark.asyncio +async def test_create_glossary_term_flattened_async(): + client = BusinessGlossaryServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_glossary_term), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = business_glossary.GlossaryTerm() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + business_glossary.GlossaryTerm() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.create_glossary_term( + parent="parent_value", + term=business_glossary.GlossaryTerm(name="name_value"), + term_id="term_id_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].term + mock_val = business_glossary.GlossaryTerm(name="name_value") + assert arg == mock_val + arg = args[0].term_id + mock_val = "term_id_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_create_glossary_term_flattened_error_async(): + client = BusinessGlossaryServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.create_glossary_term( + business_glossary.CreateGlossaryTermRequest(), + parent="parent_value", + term=business_glossary.GlossaryTerm(name="name_value"), + term_id="term_id_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + business_glossary.UpdateGlossaryTermRequest, + dict, + ], +) +def test_update_glossary_term(request_type, transport: str = "grpc"): + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_glossary_term), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = business_glossary.GlossaryTerm( + name="name_value", + uid="uid_value", + display_name="display_name_value", + description="description_value", + parent="parent_value", + ) + response = client.update_glossary_term(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = business_glossary.UpdateGlossaryTermRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, business_glossary.GlossaryTerm) + assert response.name == "name_value" + assert response.uid == "uid_value" + assert response.display_name == "display_name_value" + assert response.description == "description_value" + assert response.parent == "parent_value" + + +def test_update_glossary_term_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = business_glossary.UpdateGlossaryTermRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_glossary_term), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client.update_glossary_term(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == business_glossary.UpdateGlossaryTermRequest() + + +def test_update_glossary_term_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.update_glossary_term in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.update_glossary_term + ] = mock_rpc + request = {} + client.update_glossary_term(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.update_glossary_term(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_update_glossary_term_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = BusinessGlossaryServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.update_glossary_term + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.update_glossary_term + ] = mock_rpc + + request = {} + await client.update_glossary_term(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.update_glossary_term(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_update_glossary_term_async( + transport: str = "grpc_asyncio", + request_type=business_glossary.UpdateGlossaryTermRequest, +): + client = BusinessGlossaryServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. 
+ request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_glossary_term), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + business_glossary.GlossaryTerm( + name="name_value", + uid="uid_value", + display_name="display_name_value", + description="description_value", + parent="parent_value", + ) + ) + response = await client.update_glossary_term(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = business_glossary.UpdateGlossaryTermRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, business_glossary.GlossaryTerm) + assert response.name == "name_value" + assert response.uid == "uid_value" + assert response.display_name == "display_name_value" + assert response.description == "description_value" + assert response.parent == "parent_value" + + +@pytest.mark.asyncio +async def test_update_glossary_term_async_from_dict(): + await test_update_glossary_term_async(request_type=dict) + + +def test_update_glossary_term_field_headers(): + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = business_glossary.UpdateGlossaryTermRequest() + + request.term.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_glossary_term), "__call__" + ) as call: + call.return_value = business_glossary.GlossaryTerm() + client.update_glossary_term(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "term.name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_update_glossary_term_field_headers_async(): + client = BusinessGlossaryServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = business_glossary.UpdateGlossaryTermRequest() + + request.term.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_glossary_term), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + business_glossary.GlossaryTerm() + ) + await client.update_glossary_term(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "term.name=name_value", + ) in kw["metadata"] + + +def test_update_glossary_term_flattened(): + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_glossary_term), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = business_glossary.GlossaryTerm() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. 
+ client.update_glossary_term( + term=business_glossary.GlossaryTerm(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].term + mock_val = business_glossary.GlossaryTerm(name="name_value") + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + assert arg == mock_val + + +def test_update_glossary_term_flattened_error(): + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.update_glossary_term( + business_glossary.UpdateGlossaryTermRequest(), + term=business_glossary.GlossaryTerm(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +@pytest.mark.asyncio +async def test_update_glossary_term_flattened_async(): + client = BusinessGlossaryServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_glossary_term), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = business_glossary.GlossaryTerm() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + business_glossary.GlossaryTerm() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.update_glossary_term( + term=business_glossary.GlossaryTerm(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].term + mock_val = business_glossary.GlossaryTerm(name="name_value") + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_update_glossary_term_flattened_error_async(): + client = BusinessGlossaryServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.update_glossary_term( + business_glossary.UpdateGlossaryTermRequest(), + term=business_glossary.GlossaryTerm(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +@pytest.mark.parametrize( + "request_type", + [ + business_glossary.DeleteGlossaryTermRequest, + dict, + ], +) +def test_delete_glossary_term(request_type, transport: str = "grpc"): + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_glossary_term), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = None + response = client.delete_glossary_term(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = business_glossary.DeleteGlossaryTermRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. 
+ assert response is None + + +def test_delete_glossary_term_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = business_glossary.DeleteGlossaryTermRequest( + name="name_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_glossary_term), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.delete_glossary_term(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == business_glossary.DeleteGlossaryTermRequest( + name="name_value", + ) + + +def test_delete_glossary_term_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.delete_glossary_term in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client._transport._wrapped_methods[ + client._transport.delete_glossary_term + ] = mock_rpc + request = {} + client.delete_glossary_term(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.delete_glossary_term(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_delete_glossary_term_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = BusinessGlossaryServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.delete_glossary_term + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.delete_glossary_term + ] = mock_rpc + + request = {} + await client.delete_glossary_term(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + await client.delete_glossary_term(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_delete_glossary_term_async( + transport: str = "grpc_asyncio", + request_type=business_glossary.DeleteGlossaryTermRequest, +): + client = BusinessGlossaryServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_glossary_term), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.delete_glossary_term(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = business_glossary.DeleteGlossaryTermRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +@pytest.mark.asyncio +async def test_delete_glossary_term_async_from_dict(): + await test_delete_glossary_term_async(request_type=dict) + + +def test_delete_glossary_term_field_headers(): + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = business_glossary.DeleteGlossaryTermRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.delete_glossary_term), "__call__" + ) as call: + call.return_value = None + client.delete_glossary_term(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_delete_glossary_term_field_headers_async(): + client = BusinessGlossaryServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = business_glossary.DeleteGlossaryTermRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_glossary_term), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.delete_glossary_term(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_delete_glossary_term_flattened(): + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_glossary_term), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = None + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. 
+ client.delete_glossary_term( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_delete_glossary_term_flattened_error(): + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_glossary_term( + business_glossary.DeleteGlossaryTermRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_delete_glossary_term_flattened_async(): + client = BusinessGlossaryServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_glossary_term), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = None + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.delete_glossary_term( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_delete_glossary_term_flattened_error_async(): + client = BusinessGlossaryServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.delete_glossary_term( + business_glossary.DeleteGlossaryTermRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + business_glossary.GetGlossaryTermRequest, + dict, + ], +) +def test_get_glossary_term(request_type, transport: str = "grpc"): + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_glossary_term), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = business_glossary.GlossaryTerm( + name="name_value", + uid="uid_value", + display_name="display_name_value", + description="description_value", + parent="parent_value", + ) + response = client.get_glossary_term(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = business_glossary.GetGlossaryTermRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, business_glossary.GlossaryTerm) + assert response.name == "name_value" + assert response.uid == "uid_value" + assert response.display_name == "display_name_value" + assert response.description == "description_value" + assert response.parent == "parent_value" + + +def test_get_glossary_term_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. 
+ client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = business_glossary.GetGlossaryTermRequest( + name="name_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_glossary_term), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.get_glossary_term(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == business_glossary.GetGlossaryTermRequest( + name="name_value", + ) + + +def test_get_glossary_term_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_glossary_term in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.get_glossary_term + ] = mock_rpc + request = {} + client.get_glossary_term(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.get_glossary_term(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_get_glossary_term_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = BusinessGlossaryServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.get_glossary_term + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.get_glossary_term + ] = mock_rpc + + request = {} + await client.get_glossary_term(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.get_glossary_term(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_get_glossary_term_async( + transport: str = "grpc_asyncio", + request_type=business_glossary.GetGlossaryTermRequest, +): + client = BusinessGlossaryServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. 
+ request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_glossary_term), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + business_glossary.GlossaryTerm( + name="name_value", + uid="uid_value", + display_name="display_name_value", + description="description_value", + parent="parent_value", + ) + ) + response = await client.get_glossary_term(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = business_glossary.GetGlossaryTermRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, business_glossary.GlossaryTerm) + assert response.name == "name_value" + assert response.uid == "uid_value" + assert response.display_name == "display_name_value" + assert response.description == "description_value" + assert response.parent == "parent_value" + + +@pytest.mark.asyncio +async def test_get_glossary_term_async_from_dict(): + await test_get_glossary_term_async(request_type=dict) + + +def test_get_glossary_term_field_headers(): + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = business_glossary.GetGlossaryTermRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_glossary_term), "__call__" + ) as call: + call.return_value = business_glossary.GlossaryTerm() + client.get_glossary_term(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_glossary_term_field_headers_async(): + client = BusinessGlossaryServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = business_glossary.GetGlossaryTermRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_glossary_term), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + business_glossary.GlossaryTerm() + ) + await client.get_glossary_term(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_get_glossary_term_flattened(): + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_glossary_term), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = business_glossary.GlossaryTerm() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_glossary_term( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_get_glossary_term_flattened_error(): + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_glossary_term( + business_glossary.GetGlossaryTermRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_get_glossary_term_flattened_async(): + client = BusinessGlossaryServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_glossary_term), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = business_glossary.GlossaryTerm() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + business_glossary.GlossaryTerm() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.get_glossary_term( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_get_glossary_term_flattened_error_async(): + client = BusinessGlossaryServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.get_glossary_term( + business_glossary.GetGlossaryTermRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + business_glossary.ListGlossaryTermsRequest, + dict, + ], +) +def test_list_glossary_terms(request_type, transport: str = "grpc"): + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_glossary_terms), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = business_glossary.ListGlossaryTermsResponse( + next_page_token="next_page_token_value", + unreachable_locations=["unreachable_locations_value"], + ) + response = client.list_glossary_terms(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = business_glossary.ListGlossaryTermsRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListGlossaryTermsPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable_locations == ["unreachable_locations_value"] + + +def test_list_glossary_terms_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. 
+ client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = business_glossary.ListGlossaryTermsRequest( + parent="parent_value", + page_token="page_token_value", + filter="filter_value", + order_by="order_by_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_glossary_terms), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.list_glossary_terms(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == business_glossary.ListGlossaryTermsRequest( + parent="parent_value", + page_token="page_token_value", + filter="filter_value", + order_by="order_by_value", + ) + + +def test_list_glossary_terms_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.list_glossary_terms in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client._transport._wrapped_methods[ + client._transport.list_glossary_terms + ] = mock_rpc + request = {} + client.list_glossary_terms(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.list_glossary_terms(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_list_glossary_terms_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = BusinessGlossaryServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.list_glossary_terms + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.list_glossary_terms + ] = mock_rpc + + request = {} + await client.list_glossary_terms(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + await client.list_glossary_terms(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_list_glossary_terms_async( + transport: str = "grpc_asyncio", + request_type=business_glossary.ListGlossaryTermsRequest, +): + client = BusinessGlossaryServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_glossary_terms), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + business_glossary.ListGlossaryTermsResponse( + next_page_token="next_page_token_value", + unreachable_locations=["unreachable_locations_value"], + ) + ) + response = await client.list_glossary_terms(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = business_glossary.ListGlossaryTermsRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.ListGlossaryTermsAsyncPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable_locations == ["unreachable_locations_value"] + + +@pytest.mark.asyncio +async def test_list_glossary_terms_async_from_dict(): + await test_list_glossary_terms_async(request_type=dict) + + +def test_list_glossary_terms_field_headers(): + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = business_glossary.ListGlossaryTermsRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_glossary_terms), "__call__" + ) as call: + call.return_value = business_glossary.ListGlossaryTermsResponse() + client.list_glossary_terms(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_glossary_terms_field_headers_async(): + client = BusinessGlossaryServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = business_glossary.ListGlossaryTermsRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.list_glossary_terms), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + business_glossary.ListGlossaryTermsResponse() + ) + await client.list_glossary_terms(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_list_glossary_terms_flattened(): + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_glossary_terms), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = business_glossary.ListGlossaryTermsResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_glossary_terms( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +def test_list_glossary_terms_flattened_error(): + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.list_glossary_terms( + business_glossary.ListGlossaryTermsRequest(), + parent="parent_value", + ) + + +@pytest.mark.asyncio +async def test_list_glossary_terms_flattened_async(): + client = BusinessGlossaryServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_glossary_terms), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = business_glossary.ListGlossaryTermsResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + business_glossary.ListGlossaryTermsResponse() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.list_glossary_terms( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_list_glossary_terms_flattened_error_async(): + client = BusinessGlossaryServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.list_glossary_terms( + business_glossary.ListGlossaryTermsRequest(), + parent="parent_value", + ) + + +def test_list_glossary_terms_pager(transport_name: str = "grpc"): + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_glossary_terms), "__call__" + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + business_glossary.ListGlossaryTermsResponse( + terms=[ + business_glossary.GlossaryTerm(), + business_glossary.GlossaryTerm(), + business_glossary.GlossaryTerm(), + ], + next_page_token="abc", + ), + business_glossary.ListGlossaryTermsResponse( + terms=[], + next_page_token="def", + ), + business_glossary.ListGlossaryTermsResponse( + terms=[ + business_glossary.GlossaryTerm(), + ], + next_page_token="ghi", + ), + business_glossary.ListGlossaryTermsResponse( + terms=[ + business_glossary.GlossaryTerm(), + business_glossary.GlossaryTerm(), + ], + ), + RuntimeError, + ) + + expected_metadata = () + retry = retries.Retry() + timeout = 5 + expected_metadata = tuple(expected_metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), + ) + pager = client.list_glossary_terms(request={}, retry=retry, timeout=timeout) + + assert pager._metadata == expected_metadata + assert pager._retry == retry + assert pager._timeout == timeout + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, business_glossary.GlossaryTerm) for i in results) + + +def test_list_glossary_terms_pages(transport_name: str = "grpc"): + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_glossary_terms), "__call__" + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + business_glossary.ListGlossaryTermsResponse( + terms=[ + business_glossary.GlossaryTerm(), + business_glossary.GlossaryTerm(), + business_glossary.GlossaryTerm(), + ], + next_page_token="abc", + ), + business_glossary.ListGlossaryTermsResponse( + terms=[], + next_page_token="def", + ), + business_glossary.ListGlossaryTermsResponse( + terms=[ + business_glossary.GlossaryTerm(), + ], + next_page_token="ghi", + ), + business_glossary.ListGlossaryTermsResponse( + terms=[ + business_glossary.GlossaryTerm(), + business_glossary.GlossaryTerm(), + ], + ), + RuntimeError, + ) + pages = list(client.list_glossary_terms(request={}).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.asyncio +async def test_list_glossary_terms_async_pager(): + client = BusinessGlossaryServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_glossary_terms), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + business_glossary.ListGlossaryTermsResponse( + terms=[ + business_glossary.GlossaryTerm(), + business_glossary.GlossaryTerm(), + business_glossary.GlossaryTerm(), + ], + next_page_token="abc", + ), + business_glossary.ListGlossaryTermsResponse( + terms=[], + next_page_token="def", + ), + business_glossary.ListGlossaryTermsResponse( + terms=[ + business_glossary.GlossaryTerm(), + ], + next_page_token="ghi", + ), + business_glossary.ListGlossaryTermsResponse( + terms=[ + business_glossary.GlossaryTerm(), + business_glossary.GlossaryTerm(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_glossary_terms( + request={}, + ) + assert async_pager.next_page_token == "abc" + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, business_glossary.GlossaryTerm) for i in responses) + + +@pytest.mark.asyncio +async def test_list_glossary_terms_async_pages(): + client = BusinessGlossaryServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_glossary_terms), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + business_glossary.ListGlossaryTermsResponse( + terms=[ + business_glossary.GlossaryTerm(), + business_glossary.GlossaryTerm(), + business_glossary.GlossaryTerm(), + ], + next_page_token="abc", + ), + business_glossary.ListGlossaryTermsResponse( + terms=[], + next_page_token="def", + ), + business_glossary.ListGlossaryTermsResponse( + terms=[ + business_glossary.GlossaryTerm(), + ], + next_page_token="ghi", + ), + business_glossary.ListGlossaryTermsResponse( + terms=[ + business_glossary.GlossaryTerm(), + business_glossary.GlossaryTerm(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_glossary_terms(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +def test_create_glossary_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.create_glossary in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client._transport._wrapped_methods[client._transport.create_glossary] = mock_rpc + + request = {} + client.create_glossary(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.create_glossary(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_create_glossary_rest_required_fields( + request_type=business_glossary.CreateGlossaryRequest, +): + transport_class = transports.BusinessGlossaryServiceRestTransport + + request_init = {} + request_init["parent"] = "" + request_init["glossary_id"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + assert "glossaryId" not in jsonified_request + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_glossary._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + assert "glossaryId" in jsonified_request + assert jsonified_request["glossaryId"] == request_init["glossary_id"] + + jsonified_request["parent"] = "parent_value" + jsonified_request["glossaryId"] = "glossary_id_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_glossary._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set( + ( + "glossary_id", + "validate_only", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + assert "glossaryId" in jsonified_request + assert jsonified_request["glossaryId"] == "glossary_id_value" + + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.create_glossary(request) + + expected_params = [ + ( + "glossaryId", + "", + ), + ("$alt", "json;enum-encoding=int"), + ] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_create_glossary_rest_unset_required_fields(): + transport = transports.BusinessGlossaryServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.create_glossary._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "glossaryId", + "validateOnly", + ) + ) + & set( + ( + "parent", + "glossaryId", + "glossary", + ) + ) + ) + + +def test_create_glossary_rest_flattened(): + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1/locations/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + glossary=business_glossary.Glossary(name="name_value"), + glossary_id="glossary_id_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.create_glossary(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*}/glossaries" % client.transport._host, + args[1], + ) + + +def test_create_glossary_rest_flattened_error(transport: str = "rest"): + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.create_glossary( + business_glossary.CreateGlossaryRequest(), + parent="parent_value", + glossary=business_glossary.Glossary(name="name_value"), + glossary_id="glossary_id_value", + ) + + +def test_update_glossary_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.update_glossary in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.update_glossary] = mock_rpc + + request = {} + client.update_glossary(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.update_glossary(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_update_glossary_rest_required_fields( + request_type=business_glossary.UpdateGlossaryRequest, +): + transport_class = transports.BusinessGlossaryServiceRestTransport + + request_init = {} + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_glossary._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_glossary._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "update_mask", + "validate_only", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "patch", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.update_glossary(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_update_glossary_rest_unset_required_fields(): + transport = transports.BusinessGlossaryServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.update_glossary._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "updateMask", + "validateOnly", + ) + ) + & set( + ( + "glossary", + "updateMask", + ) + ) + ) + + +def test_update_glossary_rest_flattened(): + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = { + "glossary": { + "name": "projects/sample1/locations/sample2/glossaries/sample3" + } + } + + # get truthy value for each flattened field + mock_args = dict( + glossary=business_glossary.Glossary(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.update_glossary(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{glossary.name=projects/*/locations/*/glossaries/*}" + % client.transport._host, + args[1], + ) + + +def test_update_glossary_rest_flattened_error(transport: str = "rest"): + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.update_glossary( + business_glossary.UpdateGlossaryRequest(), + glossary=business_glossary.Glossary(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +def test_delete_glossary_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.delete_glossary in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.delete_glossary] = mock_rpc + + request = {} + client.delete_glossary(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.delete_glossary(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_delete_glossary_rest_required_fields( + request_type=business_glossary.DeleteGlossaryRequest, +): + transport_class = transports.BusinessGlossaryServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_glossary._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_glossary._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("etag",)) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "delete", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.delete_glossary(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_delete_glossary_rest_unset_required_fields(): + transport = transports.BusinessGlossaryServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.delete_glossary._get_unset_required_fields({}) + assert set(unset_fields) == (set(("etag",)) & set(("name",))) + + +def test_delete_glossary_rest_flattened(): + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/glossaries/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.delete_glossary(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/glossaries/*}" % client.transport._host, + args[1], + ) + + +def test_delete_glossary_rest_flattened_error(transport: str = "rest"): + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.delete_glossary( + business_glossary.DeleteGlossaryRequest(), + name="name_value", + ) + + +def test_get_glossary_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_glossary in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.get_glossary] = mock_rpc + + request = {} + client.get_glossary(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.get_glossary(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_get_glossary_rest_required_fields( + request_type=business_glossary.GetGlossaryRequest, +): + transport_class = transports.BusinessGlossaryServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_glossary._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_glossary._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = business_glossary.Glossary() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
+ with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = business_glossary.Glossary.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.get_glossary(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_get_glossary_rest_unset_required_fields(): + transport = transports.BusinessGlossaryServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.get_glossary._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +def test_get_glossary_rest_flattened(): + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = business_glossary.Glossary() + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/glossaries/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = business_glossary.Glossary.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.get_glossary(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/glossaries/*}" % client.transport._host, + args[1], + ) + + +def test_get_glossary_rest_flattened_error(transport: str = "rest"): + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.get_glossary( + business_glossary.GetGlossaryRequest(), + name="name_value", + ) + + +def test_list_glossaries_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list_glossaries in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.list_glossaries] = mock_rpc + + request = {} + client.list_glossaries(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.list_glossaries(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_list_glossaries_rest_required_fields( + request_type=business_glossary.ListGlossariesRequest, +): + transport_class = transports.BusinessGlossaryServiceRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_glossaries._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_glossaries._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "filter", + "order_by", + "page_size", + "page_token", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = business_glossary.ListGlossariesResponse() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = business_glossary.ListGlossariesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.list_glossaries(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_list_glossaries_rest_unset_required_fields(): + transport = transports.BusinessGlossaryServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.list_glossaries._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "filter", + "orderBy", + "pageSize", + "pageToken", + ) + ) + & set(("parent",)) + ) + + +def test_list_glossaries_rest_flattened(): + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = business_glossary.ListGlossariesResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1/locations/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = business_glossary.ListGlossariesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.list_glossaries(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*}/glossaries" % client.transport._host, + args[1], + ) + + +def test_list_glossaries_rest_flattened_error(transport: str = "rest"): + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_glossaries( + business_glossary.ListGlossariesRequest(), + parent="parent_value", + ) + + +def test_list_glossaries_rest_pager(transport: str = "rest"): + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + business_glossary.ListGlossariesResponse( + glossaries=[ + business_glossary.Glossary(), + business_glossary.Glossary(), + business_glossary.Glossary(), + ], + next_page_token="abc", + ), + business_glossary.ListGlossariesResponse( + glossaries=[], + next_page_token="def", + ), + business_glossary.ListGlossariesResponse( + glossaries=[ + business_glossary.Glossary(), + ], + next_page_token="ghi", + ), + business_glossary.ListGlossariesResponse( + glossaries=[ + business_glossary.Glossary(), + business_glossary.Glossary(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple( + business_glossary.ListGlossariesResponse.to_json(x) for x in response + ) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {"parent": "projects/sample1/locations/sample2"} + + pager = client.list_glossaries(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, business_glossary.Glossary) for i in results) + + pages = list(client.list_glossaries(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +def test_create_glossary_category_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert 
wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.create_glossary_category + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.create_glossary_category + ] = mock_rpc + + request = {} + client.create_glossary_category(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.create_glossary_category(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_create_glossary_category_rest_required_fields( + request_type=business_glossary.CreateGlossaryCategoryRequest, +): + transport_class = transports.BusinessGlossaryServiceRestTransport + + request_init = {} + request_init["parent"] = "" + request_init["category_id"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + assert "categoryId" not in jsonified_request + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_glossary_category._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + assert "categoryId" in jsonified_request + assert jsonified_request["categoryId"] == request_init["category_id"] + + jsonified_request["parent"] = "parent_value" + jsonified_request["categoryId"] = "category_id_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + 
).create_glossary_category._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("category_id",)) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + assert "categoryId" in jsonified_request + assert jsonified_request["categoryId"] == "category_id_value" + + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = business_glossary.GlossaryCategory() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = business_glossary.GlossaryCategory.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.create_glossary_category(request) + + expected_params = [ + ( + "categoryId", + "", + ), + ("$alt", "json;enum-encoding=int"), + ] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_create_glossary_category_rest_unset_required_fields(): + transport = transports.BusinessGlossaryServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.create_glossary_category._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(("categoryId",)) + & set( + ( + "parent", + "categoryId", + "category", + ) + ) + ) + + +def test_create_glossary_category_rest_flattened(): + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = business_glossary.GlossaryCategory() + + # get arguments that satisfy an http rule for this method + sample_request = { + "parent": "projects/sample1/locations/sample2/glossaries/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + category=business_glossary.GlossaryCategory(name="name_value"), + category_id="category_id_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = business_glossary.GlossaryCategory.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.create_glossary_category(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*/glossaries/*}/categories" + % client.transport._host, + args[1], + ) + + +def test_create_glossary_category_rest_flattened_error(transport: str = "rest"): + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.create_glossary_category( + business_glossary.CreateGlossaryCategoryRequest(), + parent="parent_value", + category=business_glossary.GlossaryCategory(name="name_value"), + category_id="category_id_value", + ) + + +def test_update_glossary_category_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.update_glossary_category + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.update_glossary_category + ] = mock_rpc + + request = {} + client.update_glossary_category(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.update_glossary_category(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_update_glossary_category_rest_required_fields( + request_type=business_glossary.UpdateGlossaryCategoryRequest, +): + transport_class = transports.BusinessGlossaryServiceRestTransport + + request_init = {} + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_glossary_category._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_glossary_category._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("update_mask",)) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = business_glossary.GlossaryCategory() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
+ with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "patch", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = business_glossary.GlossaryCategory.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.update_glossary_category(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_update_glossary_category_rest_unset_required_fields(): + transport = transports.BusinessGlossaryServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.update_glossary_category._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(("updateMask",)) + & set( + ( + "category", + "updateMask", + ) + ) + ) + + +def test_update_glossary_category_rest_flattened(): + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = business_glossary.GlossaryCategory() + + # get arguments that satisfy an http rule for this method + sample_request = { + "category": { + "name": "projects/sample1/locations/sample2/glossaries/sample3/categories/sample4" + } + } + + # get truthy value for each flattened field + mock_args = dict( + category=business_glossary.GlossaryCategory(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = business_glossary.GlossaryCategory.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.update_glossary_category(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{category.name=projects/*/locations/*/glossaries/*/categories/*}" + % client.transport._host, + args[1], + ) + + +def test_update_glossary_category_rest_flattened_error(transport: str = "rest"): + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.update_glossary_category( + business_glossary.UpdateGlossaryCategoryRequest(), + category=business_glossary.GlossaryCategory(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +def test_delete_glossary_category_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.delete_glossary_category + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.delete_glossary_category + ] = mock_rpc + + request = {} + client.delete_glossary_category(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.delete_glossary_category(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_delete_glossary_category_rest_required_fields( + request_type=business_glossary.DeleteGlossaryCategoryRequest, +): + transport_class = transports.BusinessGlossaryServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_glossary_category._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_glossary_category._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = None + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
+ with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "delete", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = "" + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.delete_glossary_category(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_delete_glossary_category_rest_unset_required_fields(): + transport = transports.BusinessGlossaryServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.delete_glossary_category._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +def test_delete_glossary_category_rest_flattened(): + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = None + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/glossaries/sample3/categories/sample4" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "" + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.delete_glossary_category(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/glossaries/*/categories/*}" + % client.transport._host, + args[1], + ) + + +def test_delete_glossary_category_rest_flattened_error(transport: str = "rest"): + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.delete_glossary_category( + business_glossary.DeleteGlossaryCategoryRequest(), + name="name_value", + ) + + +def test_get_glossary_category_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.get_glossary_category + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.get_glossary_category + ] = mock_rpc + + request = {} + client.get_glossary_category(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.get_glossary_category(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_get_glossary_category_rest_required_fields( + request_type=business_glossary.GetGlossaryCategoryRequest, +): + transport_class = transports.BusinessGlossaryServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_glossary_category._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_glossary_category._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = business_glossary.GlossaryCategory() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
+ with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = business_glossary.GlossaryCategory.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.get_glossary_category(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_get_glossary_category_rest_unset_required_fields(): + transport = transports.BusinessGlossaryServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.get_glossary_category._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +def test_get_glossary_category_rest_flattened(): + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = business_glossary.GlossaryCategory() + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/glossaries/sample3/categories/sample4" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = business_glossary.GlossaryCategory.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.get_glossary_category(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/glossaries/*/categories/*}" + % client.transport._host, + args[1], + ) + + +def test_get_glossary_category_rest_flattened_error(transport: str = "rest"): + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.get_glossary_category( + business_glossary.GetGlossaryCategoryRequest(), + name="name_value", + ) + + +def test_list_glossary_categories_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.list_glossary_categories + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.list_glossary_categories + ] = mock_rpc + + request = {} + client.list_glossary_categories(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.list_glossary_categories(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_list_glossary_categories_rest_required_fields( + request_type=business_glossary.ListGlossaryCategoriesRequest, +): + transport_class = transports.BusinessGlossaryServiceRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_glossary_categories._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_glossary_categories._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "filter", + "order_by", + "page_size", + "page_token", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = business_glossary.ListGlossaryCategoriesResponse() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = business_glossary.ListGlossaryCategoriesResponse.pb( + return_value + ) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.list_glossary_categories(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_list_glossary_categories_rest_unset_required_fields(): + transport = transports.BusinessGlossaryServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.list_glossary_categories._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "filter", + "orderBy", + "pageSize", + "pageToken", + ) + ) + & set(("parent",)) + ) + + +def test_list_glossary_categories_rest_flattened(): + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = business_glossary.ListGlossaryCategoriesResponse() + + # get arguments that satisfy an http rule for this method + sample_request = { + "parent": "projects/sample1/locations/sample2/glossaries/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = business_glossary.ListGlossaryCategoriesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.list_glossary_categories(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*/glossaries/*}/categories" + % client.transport._host, + args[1], + ) + + +def test_list_glossary_categories_rest_flattened_error(transport: str = "rest"): + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.list_glossary_categories( + business_glossary.ListGlossaryCategoriesRequest(), + parent="parent_value", + ) + + +def test_list_glossary_categories_rest_pager(transport: str = "rest"): + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. + # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + business_glossary.ListGlossaryCategoriesResponse( + categories=[ + business_glossary.GlossaryCategory(), + business_glossary.GlossaryCategory(), + business_glossary.GlossaryCategory(), + ], + next_page_token="abc", + ), + business_glossary.ListGlossaryCategoriesResponse( + categories=[], + next_page_token="def", + ), + business_glossary.ListGlossaryCategoriesResponse( + categories=[ + business_glossary.GlossaryCategory(), + ], + next_page_token="ghi", + ), + business_glossary.ListGlossaryCategoriesResponse( + categories=[ + business_glossary.GlossaryCategory(), + business_glossary.GlossaryCategory(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple( + business_glossary.ListGlossaryCategoriesResponse.to_json(x) + for x in response + ) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = { + "parent": "projects/sample1/locations/sample2/glossaries/sample3" + } + + pager = client.list_glossary_categories(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, 
business_glossary.GlossaryCategory) for i in results) + + pages = list(client.list_glossary_categories(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +def test_create_glossary_term_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.create_glossary_term in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.create_glossary_term + ] = mock_rpc + + request = {} + client.create_glossary_term(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.create_glossary_term(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_create_glossary_term_rest_required_fields( + request_type=business_glossary.CreateGlossaryTermRequest, +): + transport_class = transports.BusinessGlossaryServiceRestTransport + + request_init = {} + request_init["parent"] = "" + request_init["term_id"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + assert "termId" not in jsonified_request + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_glossary_term._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + assert "termId" in jsonified_request + assert jsonified_request["termId"] == request_init["term_id"] + + jsonified_request["parent"] = "parent_value" + jsonified_request["termId"] = "term_id_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_glossary_term._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set(("term_id",)) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + assert "termId" in jsonified_request + assert jsonified_request["termId"] == "term_id_value" + + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = business_glossary.GlossaryTerm() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = business_glossary.GlossaryTerm.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.create_glossary_term(request) + + expected_params = [ + ( + "termId", + "", + ), + ("$alt", "json;enum-encoding=int"), + ] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_create_glossary_term_rest_unset_required_fields(): + transport = transports.BusinessGlossaryServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.create_glossary_term._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(("termId",)) + & set( + ( + "parent", + "termId", + "term", + ) + ) + ) + + +def test_create_glossary_term_rest_flattened(): + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = business_glossary.GlossaryTerm() + + # get arguments that satisfy an http rule for this method + sample_request = { + "parent": "projects/sample1/locations/sample2/glossaries/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + term=business_glossary.GlossaryTerm(name="name_value"), + term_id="term_id_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = business_glossary.GlossaryTerm.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.create_glossary_term(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*/glossaries/*}/terms" + % client.transport._host, + args[1], + ) + + +def test_create_glossary_term_rest_flattened_error(transport: str = "rest"): + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.create_glossary_term( + business_glossary.CreateGlossaryTermRequest(), + parent="parent_value", + term=business_glossary.GlossaryTerm(name="name_value"), + term_id="term_id_value", + ) + + +def test_update_glossary_term_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.update_glossary_term in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.update_glossary_term + ] = mock_rpc + + request = {} + client.update_glossary_term(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.update_glossary_term(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_update_glossary_term_rest_required_fields( + request_type=business_glossary.UpdateGlossaryTermRequest, +): + transport_class = transports.BusinessGlossaryServiceRestTransport + + request_init = {} + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_glossary_term._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_glossary_term._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("update_mask",)) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = business_glossary.GlossaryTerm() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
+ with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "patch", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = business_glossary.GlossaryTerm.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.update_glossary_term(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_update_glossary_term_rest_unset_required_fields(): + transport = transports.BusinessGlossaryServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.update_glossary_term._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(("updateMask",)) + & set( + ( + "term", + "updateMask", + ) + ) + ) + + +def test_update_glossary_term_rest_flattened(): + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = business_glossary.GlossaryTerm() + + # get arguments that satisfy an http rule for this method + sample_request = { + "term": { + "name": "projects/sample1/locations/sample2/glossaries/sample3/terms/sample4" + } + } + + # get truthy value for each flattened field + mock_args = dict( + term=business_glossary.GlossaryTerm(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = business_glossary.GlossaryTerm.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.update_glossary_term(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{term.name=projects/*/locations/*/glossaries/*/terms/*}" + % client.transport._host, + args[1], + ) + + +def test_update_glossary_term_rest_flattened_error(transport: str = "rest"): + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.update_glossary_term( + business_glossary.UpdateGlossaryTermRequest(), + term=business_glossary.GlossaryTerm(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +def test_delete_glossary_term_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.delete_glossary_term in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.delete_glossary_term + ] = mock_rpc + + request = {} + client.delete_glossary_term(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.delete_glossary_term(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_delete_glossary_term_rest_required_fields( + request_type=business_glossary.DeleteGlossaryTermRequest, +): + transport_class = transports.BusinessGlossaryServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_glossary_term._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_glossary_term._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = None + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
+ with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "delete", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = "" + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.delete_glossary_term(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_delete_glossary_term_rest_unset_required_fields(): + transport = transports.BusinessGlossaryServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.delete_glossary_term._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +def test_delete_glossary_term_rest_flattened(): + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = None + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/glossaries/sample3/terms/sample4" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "" + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.delete_glossary_term(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/glossaries/*/terms/*}" + % client.transport._host, + args[1], + ) + + +def test_delete_glossary_term_rest_flattened_error(transport: str = "rest"): + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.delete_glossary_term( + business_glossary.DeleteGlossaryTermRequest(), + name="name_value", + ) + + +def test_get_glossary_term_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_glossary_term in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.get_glossary_term + ] = mock_rpc + + request = {} + client.get_glossary_term(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.get_glossary_term(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_get_glossary_term_rest_required_fields( + request_type=business_glossary.GetGlossaryTermRequest, +): + transport_class = transports.BusinessGlossaryServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_glossary_term._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_glossary_term._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = business_glossary.GlossaryTerm() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
+ with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = business_glossary.GlossaryTerm.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.get_glossary_term(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_get_glossary_term_rest_unset_required_fields(): + transport = transports.BusinessGlossaryServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.get_glossary_term._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +def test_get_glossary_term_rest_flattened(): + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = business_glossary.GlossaryTerm() + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/glossaries/sample3/terms/sample4" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = business_glossary.GlossaryTerm.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.get_glossary_term(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/glossaries/*/terms/*}" + % client.transport._host, + args[1], + ) + + +def test_get_glossary_term_rest_flattened_error(transport: str = "rest"): + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.get_glossary_term( + business_glossary.GetGlossaryTermRequest(), + name="name_value", + ) + + +def test_list_glossary_terms_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.list_glossary_terms in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.list_glossary_terms + ] = mock_rpc + + request = {} + client.list_glossary_terms(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.list_glossary_terms(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_list_glossary_terms_rest_required_fields( + request_type=business_glossary.ListGlossaryTermsRequest, +): + transport_class = transports.BusinessGlossaryServiceRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_glossary_terms._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_glossary_terms._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "filter", + "order_by", + "page_size", + "page_token", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = business_glossary.ListGlossaryTermsResponse() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = business_glossary.ListGlossaryTermsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.list_glossary_terms(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_list_glossary_terms_rest_unset_required_fields(): + transport = transports.BusinessGlossaryServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.list_glossary_terms._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "filter", + "orderBy", + "pageSize", + "pageToken", + ) + ) + & set(("parent",)) + ) + + +def test_list_glossary_terms_rest_flattened(): + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = business_glossary.ListGlossaryTermsResponse() + + # get arguments that satisfy an http rule for this method + sample_request = { + "parent": "projects/sample1/locations/sample2/glossaries/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = business_glossary.ListGlossaryTermsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.list_glossary_terms(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*/glossaries/*}/terms" + % client.transport._host, + args[1], + ) + + +def test_list_glossary_terms_rest_flattened_error(transport: str = "rest"): + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.list_glossary_terms( + business_glossary.ListGlossaryTermsRequest(), + parent="parent_value", + ) + + +def test_list_glossary_terms_rest_pager(transport: str = "rest"): + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. + # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + business_glossary.ListGlossaryTermsResponse( + terms=[ + business_glossary.GlossaryTerm(), + business_glossary.GlossaryTerm(), + business_glossary.GlossaryTerm(), + ], + next_page_token="abc", + ), + business_glossary.ListGlossaryTermsResponse( + terms=[], + next_page_token="def", + ), + business_glossary.ListGlossaryTermsResponse( + terms=[ + business_glossary.GlossaryTerm(), + ], + next_page_token="ghi", + ), + business_glossary.ListGlossaryTermsResponse( + terms=[ + business_glossary.GlossaryTerm(), + business_glossary.GlossaryTerm(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple( + business_glossary.ListGlossaryTermsResponse.to_json(x) for x in response + ) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = { + "parent": "projects/sample1/locations/sample2/glossaries/sample3" + } + + pager = client.list_glossary_terms(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, business_glossary.GlossaryTerm) for i in results) + + pages = 
list(client.list_glossary_terms(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. + transport = transports.BusinessGlossaryServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # It is an error to provide a credentials file and a transport instance. + transport = transports.BusinessGlossaryServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = BusinessGlossaryServiceClient( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) + + # It is an error to provide an api_key and a transport instance. + transport = transports.BusinessGlossaryServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = BusinessGlossaryServiceClient( + client_options=options, + transport=transport, + ) + + # It is an error to provide an api_key and a credential. + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = BusinessGlossaryServiceClient( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + + # It is an error to provide scopes and a transport instance. 
+ transport = transports.BusinessGlossaryServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = BusinessGlossaryServiceClient( + client_options={"scopes": ["1", "2"]}, + transport=transport, + ) + + +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. + transport = transports.BusinessGlossaryServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + client = BusinessGlossaryServiceClient(transport=transport) + assert client.transport is transport + + +def test_transport_get_channel(): + # A client may be instantiated with a custom transport instance. + transport = transports.BusinessGlossaryServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + transport = transports.BusinessGlossaryServiceGrpcAsyncIOTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.BusinessGlossaryServiceGrpcTransport, + transports.BusinessGlossaryServiceGrpcAsyncIOTransport, + transports.BusinessGlossaryServiceRestTransport, + ], +) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. 
+ with mock.patch.object(google.auth, "default") as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() + + +def test_transport_kind_grpc(): + transport = BusinessGlossaryServiceClient.get_transport_class("grpc")( + credentials=ga_credentials.AnonymousCredentials() + ) + assert transport.kind == "grpc" + + +def test_initialize_client_w_grpc(): + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="grpc" + ) + assert client is not None + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_create_glossary_empty_call_grpc(): + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.create_glossary), "__call__") as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.create_glossary(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = business_glossary.CreateGlossaryRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_update_glossary_empty_call_grpc(): + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.update_glossary), "__call__") as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.update_glossary(request=None) + + # Establish that the underlying stub method was called. 
+ call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = business_glossary.UpdateGlossaryRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_delete_glossary_empty_call_grpc(): + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.delete_glossary), "__call__") as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.delete_glossary(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = business_glossary.DeleteGlossaryRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_get_glossary_empty_call_grpc(): + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.get_glossary), "__call__") as call: + call.return_value = business_glossary.Glossary() + client.get_glossary(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = business_glossary.GetGlossaryRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_list_glossaries_empty_call_grpc(): + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. 
+ with mock.patch.object(type(client.transport.list_glossaries), "__call__") as call: + call.return_value = business_glossary.ListGlossariesResponse() + client.list_glossaries(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = business_glossary.ListGlossariesRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_create_glossary_category_empty_call_grpc(): + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.create_glossary_category), "__call__" + ) as call: + call.return_value = business_glossary.GlossaryCategory() + client.create_glossary_category(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = business_glossary.CreateGlossaryCategoryRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_update_glossary_category_empty_call_grpc(): + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.update_glossary_category), "__call__" + ) as call: + call.return_value = business_glossary.GlossaryCategory() + client.update_glossary_category(request=None) + + # Establish that the underlying stub method was called. 
+ call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = business_glossary.UpdateGlossaryCategoryRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_delete_glossary_category_empty_call_grpc(): + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.delete_glossary_category), "__call__" + ) as call: + call.return_value = None + client.delete_glossary_category(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = business_glossary.DeleteGlossaryCategoryRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_get_glossary_category_empty_call_grpc(): + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_glossary_category), "__call__" + ) as call: + call.return_value = business_glossary.GlossaryCategory() + client.get_glossary_category(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = business_glossary.GetGlossaryCategoryRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. 
+def test_list_glossary_categories_empty_call_grpc(): + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_glossary_categories), "__call__" + ) as call: + call.return_value = business_glossary.ListGlossaryCategoriesResponse() + client.list_glossary_categories(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = business_glossary.ListGlossaryCategoriesRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_create_glossary_term_empty_call_grpc(): + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.create_glossary_term), "__call__" + ) as call: + call.return_value = business_glossary.GlossaryTerm() + client.create_glossary_term(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = business_glossary.CreateGlossaryTermRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_update_glossary_term_empty_call_grpc(): + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. 
+ with mock.patch.object( + type(client.transport.update_glossary_term), "__call__" + ) as call: + call.return_value = business_glossary.GlossaryTerm() + client.update_glossary_term(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = business_glossary.UpdateGlossaryTermRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_delete_glossary_term_empty_call_grpc(): + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.delete_glossary_term), "__call__" + ) as call: + call.return_value = None + client.delete_glossary_term(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = business_glossary.DeleteGlossaryTermRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_get_glossary_term_empty_call_grpc(): + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_glossary_term), "__call__" + ) as call: + call.return_value = business_glossary.GlossaryTerm() + client.get_glossary_term(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = business_glossary.GetGlossaryTermRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. 
request == None and no flattened fields passed, work. +def test_list_glossary_terms_empty_call_grpc(): + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_glossary_terms), "__call__" + ) as call: + call.return_value = business_glossary.ListGlossaryTermsResponse() + client.list_glossary_terms(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = business_glossary.ListGlossaryTermsRequest() + + assert args[0] == request_msg + + +def test_transport_kind_grpc_asyncio(): + transport = BusinessGlossaryServiceAsyncClient.get_transport_class("grpc_asyncio")( + credentials=async_anonymous_credentials() + ) + assert transport.kind == "grpc_asyncio" + + +def test_initialize_client_w_grpc_asyncio(): + client = BusinessGlossaryServiceAsyncClient( + credentials=async_anonymous_credentials(), transport="grpc_asyncio" + ) + assert client is not None + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_create_glossary_empty_call_grpc_asyncio(): + client = BusinessGlossaryServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.create_glossary), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + await client.create_glossary(request=None) + + # Establish that the underlying stub method was called. 
+ call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = business_glossary.CreateGlossaryRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_update_glossary_empty_call_grpc_asyncio(): + client = BusinessGlossaryServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.update_glossary), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + await client.update_glossary(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = business_glossary.UpdateGlossaryRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_delete_glossary_empty_call_grpc_asyncio(): + client = BusinessGlossaryServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.delete_glossary), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + await client.delete_glossary(request=None) + + # Establish that the underlying stub method was called. 
+ call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = business_glossary.DeleteGlossaryRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_get_glossary_empty_call_grpc_asyncio(): + client = BusinessGlossaryServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.get_glossary), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + business_glossary.Glossary( + name="name_value", + uid="uid_value", + display_name="display_name_value", + description="description_value", + term_count=1088, + category_count=1510, + etag="etag_value", + ) + ) + await client.get_glossary(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = business_glossary.GetGlossaryRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_list_glossaries_empty_call_grpc_asyncio(): + client = BusinessGlossaryServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.list_glossaries), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + business_glossary.ListGlossariesResponse( + next_page_token="next_page_token_value", + unreachable_locations=["unreachable_locations_value"], + ) + ) + await client.list_glossaries(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = business_glossary.ListGlossariesRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_create_glossary_category_empty_call_grpc_asyncio(): + client = BusinessGlossaryServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.create_glossary_category), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + business_glossary.GlossaryCategory( + name="name_value", + uid="uid_value", + display_name="display_name_value", + description="description_value", + parent="parent_value", + ) + ) + await client.create_glossary_category(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = business_glossary.CreateGlossaryCategoryRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_update_glossary_category_empty_call_grpc_asyncio(): + client = BusinessGlossaryServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. 
+ with mock.patch.object( + type(client.transport.update_glossary_category), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + business_glossary.GlossaryCategory( + name="name_value", + uid="uid_value", + display_name="display_name_value", + description="description_value", + parent="parent_value", + ) + ) + await client.update_glossary_category(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = business_glossary.UpdateGlossaryCategoryRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_delete_glossary_category_empty_call_grpc_asyncio(): + client = BusinessGlossaryServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.delete_glossary_category), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.delete_glossary_category(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = business_glossary.DeleteGlossaryCategoryRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_get_glossary_category_empty_call_grpc_asyncio(): + client = BusinessGlossaryServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. 
+ with mock.patch.object( + type(client.transport.get_glossary_category), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + business_glossary.GlossaryCategory( + name="name_value", + uid="uid_value", + display_name="display_name_value", + description="description_value", + parent="parent_value", + ) + ) + await client.get_glossary_category(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = business_glossary.GetGlossaryCategoryRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_list_glossary_categories_empty_call_grpc_asyncio(): + client = BusinessGlossaryServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_glossary_categories), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + business_glossary.ListGlossaryCategoriesResponse( + next_page_token="next_page_token_value", + unreachable_locations=["unreachable_locations_value"], + ) + ) + await client.list_glossary_categories(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = business_glossary.ListGlossaryCategoriesRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. 
+@pytest.mark.asyncio +async def test_create_glossary_term_empty_call_grpc_asyncio(): + client = BusinessGlossaryServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.create_glossary_term), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + business_glossary.GlossaryTerm( + name="name_value", + uid="uid_value", + display_name="display_name_value", + description="description_value", + parent="parent_value", + ) + ) + await client.create_glossary_term(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = business_glossary.CreateGlossaryTermRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_update_glossary_term_empty_call_grpc_asyncio(): + client = BusinessGlossaryServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.update_glossary_term), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + business_glossary.GlossaryTerm( + name="name_value", + uid="uid_value", + display_name="display_name_value", + description="description_value", + parent="parent_value", + ) + ) + await client.update_glossary_term(request=None) + + # Establish that the underlying stub method was called. 
+ call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = business_glossary.UpdateGlossaryTermRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_delete_glossary_term_empty_call_grpc_asyncio(): + client = BusinessGlossaryServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.delete_glossary_term), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.delete_glossary_term(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = business_glossary.DeleteGlossaryTermRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_get_glossary_term_empty_call_grpc_asyncio(): + client = BusinessGlossaryServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_glossary_term), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + business_glossary.GlossaryTerm( + name="name_value", + uid="uid_value", + display_name="display_name_value", + description="description_value", + parent="parent_value", + ) + ) + await client.get_glossary_term(request=None) + + # Establish that the underlying stub method was called. 
+ call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = business_glossary.GetGlossaryTermRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_list_glossary_terms_empty_call_grpc_asyncio(): + client = BusinessGlossaryServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_glossary_terms), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + business_glossary.ListGlossaryTermsResponse( + next_page_token="next_page_token_value", + unreachable_locations=["unreachable_locations_value"], + ) + ) + await client.list_glossary_terms(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = business_glossary.ListGlossaryTermsRequest() + + assert args[0] == request_msg + + +def test_transport_kind_rest(): + transport = BusinessGlossaryServiceClient.get_transport_class("rest")( + credentials=ga_credentials.AnonymousCredentials() + ) + assert transport.kind == "rest" + + +def test_create_glossary_rest_bad_request( + request_type=business_glossary.CreateGlossaryRequest, +): + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.create_glossary(request) + + +@pytest.mark.parametrize( + "request_type", + [ + business_glossary.CreateGlossaryRequest, + dict, + ], +) +def test_create_glossary_rest_call_success(request_type): + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request_init["glossary"] = { + "name": "name_value", + "uid": "uid_value", + "display_name": "display_name_value", + "description": "description_value", + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "labels": {}, + "term_count": 1088, + "category_count": 1510, + "etag": "etag_value", + } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = business_glossary.CreateGlossaryRequest.meta.fields["glossary"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["glossary"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["glossary"][field])): + del request_init["glossary"][field][i][subfield] + else: + del 
request_init["glossary"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.create_glossary(request) + + # Establish that the response is the type that we expect. + json_return_value = json_format.MessageToJson(return_value) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_create_glossary_rest_interceptors(null_interceptor): + transport = transports.BusinessGlossaryServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.BusinessGlossaryServiceRestInterceptor(), + ) + client = BusinessGlossaryServiceClient(transport=transport) + + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.BusinessGlossaryServiceRestInterceptor, "post_create_glossary" + ) as post, mock.patch.object( + transports.BusinessGlossaryServiceRestInterceptor, + "post_create_glossary_with_metadata", + ) as post_with_metadata, mock.patch.object( + transports.BusinessGlossaryServiceRestInterceptor, "pre_create_glossary" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = 
business_glossary.CreateGlossaryRequest.pb( + business_glossary.CreateGlossaryRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = json_format.MessageToJson(operations_pb2.Operation()) + req.return_value.content = return_value + + request = business_glossary.CreateGlossaryRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata + + client.create_glossary( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_update_glossary_rest_bad_request( + request_type=business_glossary.UpdateGlossaryRequest, +): + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = { + "glossary": {"name": "projects/sample1/locations/sample2/glossaries/sample3"} + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.update_glossary(request) + + +@pytest.mark.parametrize( + "request_type", + [ + business_glossary.UpdateGlossaryRequest, + dict, + ], +) +def test_update_glossary_rest_call_success(request_type): + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = { + "glossary": {"name": "projects/sample1/locations/sample2/glossaries/sample3"} + } + request_init["glossary"] = { + "name": "projects/sample1/locations/sample2/glossaries/sample3", + "uid": "uid_value", + "display_name": "display_name_value", + "description": "description_value", + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "labels": {}, + "term_count": 1088, + "category_count": 1510, + "etag": "etag_value", + } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = business_glossary.UpdateGlossaryRequest.meta.fields["glossary"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["glossary"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["glossary"][field])): + del request_init["glossary"][field][i][subfield] + else: + del 
request_init["glossary"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.update_glossary(request) + + # Establish that the response is the type that we expect. + json_return_value = json_format.MessageToJson(return_value) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_update_glossary_rest_interceptors(null_interceptor): + transport = transports.BusinessGlossaryServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.BusinessGlossaryServiceRestInterceptor(), + ) + client = BusinessGlossaryServiceClient(transport=transport) + + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.BusinessGlossaryServiceRestInterceptor, "post_update_glossary" + ) as post, mock.patch.object( + transports.BusinessGlossaryServiceRestInterceptor, + "post_update_glossary_with_metadata", + ) as post_with_metadata, mock.patch.object( + transports.BusinessGlossaryServiceRestInterceptor, "pre_update_glossary" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = 
business_glossary.UpdateGlossaryRequest.pb( + business_glossary.UpdateGlossaryRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = json_format.MessageToJson(operations_pb2.Operation()) + req.return_value.content = return_value + + request = business_glossary.UpdateGlossaryRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata + + client.update_glossary( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_delete_glossary_rest_bad_request( + request_type=business_glossary.DeleteGlossaryRequest, +): + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/glossaries/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.delete_glossary(request) + + +@pytest.mark.parametrize( + "request_type", + [ + business_glossary.DeleteGlossaryRequest, + dict, + ], +) +def test_delete_glossary_rest_call_success(request_type): + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/glossaries/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.delete_glossary(request) + + # Establish that the response is the type that we expect. 
+ json_return_value = json_format.MessageToJson(return_value) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_glossary_rest_interceptors(null_interceptor): + transport = transports.BusinessGlossaryServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.BusinessGlossaryServiceRestInterceptor(), + ) + client = BusinessGlossaryServiceClient(transport=transport) + + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.BusinessGlossaryServiceRestInterceptor, "post_delete_glossary" + ) as post, mock.patch.object( + transports.BusinessGlossaryServiceRestInterceptor, + "post_delete_glossary_with_metadata", + ) as post_with_metadata, mock.patch.object( + transports.BusinessGlossaryServiceRestInterceptor, "pre_delete_glossary" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = business_glossary.DeleteGlossaryRequest.pb( + business_glossary.DeleteGlossaryRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = json_format.MessageToJson(operations_pb2.Operation()) + req.return_value.content = return_value + + request = business_glossary.DeleteGlossaryRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata + + client.delete_glossary( + request, + metadata=[ + ("key", "val"), + 
("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_get_glossary_rest_bad_request( + request_type=business_glossary.GetGlossaryRequest, +): + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/glossaries/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.get_glossary(request) + + +@pytest.mark.parametrize( + "request_type", + [ + business_glossary.GetGlossaryRequest, + dict, + ], +) +def test_get_glossary_rest_call_success(request_type): + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/glossaries/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = business_glossary.Glossary( + name="name_value", + uid="uid_value", + display_name="display_name_value", + description="description_value", + term_count=1088, + category_count=1510, + etag="etag_value", + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = business_glossary.Glossary.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.get_glossary(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, business_glossary.Glossary) + assert response.name == "name_value" + assert response.uid == "uid_value" + assert response.display_name == "display_name_value" + assert response.description == "description_value" + assert response.term_count == 1088 + assert response.category_count == 1510 + assert response.etag == "etag_value" + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_glossary_rest_interceptors(null_interceptor): + transport = transports.BusinessGlossaryServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.BusinessGlossaryServiceRestInterceptor(), + ) + client = BusinessGlossaryServiceClient(transport=transport) + + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.BusinessGlossaryServiceRestInterceptor, "post_get_glossary" + ) as post, mock.patch.object( + transports.BusinessGlossaryServiceRestInterceptor, + "post_get_glossary_with_metadata", + ) as post_with_metadata, mock.patch.object( + 
transports.BusinessGlossaryServiceRestInterceptor, "pre_get_glossary" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = business_glossary.GetGlossaryRequest.pb( + business_glossary.GetGlossaryRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = business_glossary.Glossary.to_json(business_glossary.Glossary()) + req.return_value.content = return_value + + request = business_glossary.GetGlossaryRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = business_glossary.Glossary() + post_with_metadata.return_value = business_glossary.Glossary(), metadata + + client.get_glossary( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_list_glossaries_rest_bad_request( + request_type=business_glossary.ListGlossariesRequest, +): + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.list_glossaries(request) + + +@pytest.mark.parametrize( + "request_type", + [ + business_glossary.ListGlossariesRequest, + dict, + ], +) +def test_list_glossaries_rest_call_success(request_type): + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = business_glossary.ListGlossariesResponse( + next_page_token="next_page_token_value", + unreachable_locations=["unreachable_locations_value"], + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = business_glossary.ListGlossariesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.list_glossaries(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.ListGlossariesPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable_locations == ["unreachable_locations_value"] + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_glossaries_rest_interceptors(null_interceptor): + transport = transports.BusinessGlossaryServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.BusinessGlossaryServiceRestInterceptor(), + ) + client = BusinessGlossaryServiceClient(transport=transport) + + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.BusinessGlossaryServiceRestInterceptor, "post_list_glossaries" + ) as post, mock.patch.object( + transports.BusinessGlossaryServiceRestInterceptor, + "post_list_glossaries_with_metadata", + ) as post_with_metadata, mock.patch.object( + transports.BusinessGlossaryServiceRestInterceptor, "pre_list_glossaries" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = business_glossary.ListGlossariesRequest.pb( + business_glossary.ListGlossariesRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = business_glossary.ListGlossariesResponse.to_json( + business_glossary.ListGlossariesResponse() + ) + req.return_value.content = return_value + + request = business_glossary.ListGlossariesRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = business_glossary.ListGlossariesResponse() + post_with_metadata.return_value = 
( + business_glossary.ListGlossariesResponse(), + metadata, + ) + + client.list_glossaries( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_create_glossary_category_rest_bad_request( + request_type=business_glossary.CreateGlossaryCategoryRequest, +): + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2/glossaries/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.create_glossary_category(request) + + +@pytest.mark.parametrize( + "request_type", + [ + business_glossary.CreateGlossaryCategoryRequest, + dict, + ], +) +def test_create_glossary_category_rest_call_success(request_type): + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2/glossaries/sample3"} + request_init["category"] = { + "name": "name_value", + "uid": "uid_value", + "display_name": "display_name_value", + "description": "description_value", + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "labels": {}, + "parent": "parent_value", + } + # The version of a generated 
dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = business_glossary.CreateGlossaryCategoryRequest.meta.fields["category"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["category"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields 
from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["category"][field])): + del request_init["category"][field][i][subfield] + else: + del request_init["category"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = business_glossary.GlossaryCategory( + name="name_value", + uid="uid_value", + display_name="display_name_value", + description="description_value", + parent="parent_value", + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = business_glossary.GlossaryCategory.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.create_glossary_category(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, business_glossary.GlossaryCategory) + assert response.name == "name_value" + assert response.uid == "uid_value" + assert response.display_name == "display_name_value" + assert response.description == "description_value" + assert response.parent == "parent_value" + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_create_glossary_category_rest_interceptors(null_interceptor): + transport = transports.BusinessGlossaryServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.BusinessGlossaryServiceRestInterceptor(), + ) + client = BusinessGlossaryServiceClient(transport=transport) + + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.BusinessGlossaryServiceRestInterceptor, + "post_create_glossary_category", + ) as post, mock.patch.object( + transports.BusinessGlossaryServiceRestInterceptor, + "post_create_glossary_category_with_metadata", + ) as post_with_metadata, mock.patch.object( + transports.BusinessGlossaryServiceRestInterceptor, + "pre_create_glossary_category", + ) as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = business_glossary.CreateGlossaryCategoryRequest.pb( + business_glossary.CreateGlossaryCategoryRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = business_glossary.GlossaryCategory.to_json( + business_glossary.GlossaryCategory() + ) + req.return_value.content = return_value + + request = business_glossary.CreateGlossaryCategoryRequest() + metadata = [ + ("key", "val"), + ("cephalopod", 
"squid"), + ] + pre.return_value = request, metadata + post.return_value = business_glossary.GlossaryCategory() + post_with_metadata.return_value = business_glossary.GlossaryCategory(), metadata + + client.create_glossary_category( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_update_glossary_category_rest_bad_request( + request_type=business_glossary.UpdateGlossaryCategoryRequest, +): + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = { + "category": { + "name": "projects/sample1/locations/sample2/glossaries/sample3/categories/sample4" + } + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.update_glossary_category(request) + + +@pytest.mark.parametrize( + "request_type", + [ + business_glossary.UpdateGlossaryCategoryRequest, + dict, + ], +) +def test_update_glossary_category_rest_call_success(request_type): + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = { + "category": { + "name": "projects/sample1/locations/sample2/glossaries/sample3/categories/sample4" + } + } + request_init["category"] = { + "name": 
"projects/sample1/locations/sample2/glossaries/sample3/categories/sample4", + "uid": "uid_value", + "display_name": "display_name_value", + "description": "description_value", + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "labels": {}, + "parent": "parent_value", + } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = business_glossary.UpdateGlossaryCategoryRequest.meta.fields["category"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["category"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is 
another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["category"][field])): + del request_init["category"][field][i][subfield] + else: + del request_init["category"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = business_glossary.GlossaryCategory( + name="name_value", + uid="uid_value", + display_name="display_name_value", + description="description_value", + parent="parent_value", + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = business_glossary.GlossaryCategory.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.update_glossary_category(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, business_glossary.GlossaryCategory) + assert response.name == "name_value" + assert response.uid == "uid_value" + assert response.display_name == "display_name_value" + assert response.description == "description_value" + assert response.parent == "parent_value" + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_update_glossary_category_rest_interceptors(null_interceptor): + transport = transports.BusinessGlossaryServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.BusinessGlossaryServiceRestInterceptor(), + ) + client = BusinessGlossaryServiceClient(transport=transport) + + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.BusinessGlossaryServiceRestInterceptor, + "post_update_glossary_category", + ) as post, mock.patch.object( + transports.BusinessGlossaryServiceRestInterceptor, + "post_update_glossary_category_with_metadata", + ) as post_with_metadata, mock.patch.object( + transports.BusinessGlossaryServiceRestInterceptor, + "pre_update_glossary_category", + ) as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = business_glossary.UpdateGlossaryCategoryRequest.pb( + business_glossary.UpdateGlossaryCategoryRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = business_glossary.GlossaryCategory.to_json( + business_glossary.GlossaryCategory() + ) + req.return_value.content = return_value + + request = business_glossary.UpdateGlossaryCategoryRequest() + metadata = [ + ("key", "val"), + ("cephalopod", 
"squid"), + ] + pre.return_value = request, metadata + post.return_value = business_glossary.GlossaryCategory() + post_with_metadata.return_value = business_glossary.GlossaryCategory(), metadata + + client.update_glossary_category( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_delete_glossary_category_rest_bad_request( + request_type=business_glossary.DeleteGlossaryCategoryRequest, +): + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/glossaries/sample3/categories/sample4" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.delete_glossary_category(request) + + +@pytest.mark.parametrize( + "request_type", + [ + business_glossary.DeleteGlossaryCategoryRequest, + dict, + ], +) +def test_delete_glossary_category_rest_call_success(request_type): + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/glossaries/sample3/categories/sample4" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = None + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = "" + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.delete_glossary_category(request) + + # Establish that the response is the type that we expect. + assert response is None + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_glossary_category_rest_interceptors(null_interceptor): + transport = transports.BusinessGlossaryServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.BusinessGlossaryServiceRestInterceptor(), + ) + client = BusinessGlossaryServiceClient(transport=transport) + + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.BusinessGlossaryServiceRestInterceptor, + "pre_delete_glossary_category", + ) as pre: + pre.assert_not_called() + pb_message = business_glossary.DeleteGlossaryCategoryRequest.pb( + business_glossary.DeleteGlossaryCategoryRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + request = business_glossary.DeleteGlossaryCategoryRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + + client.delete_glossary_category( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + 
], + ) + + pre.assert_called_once() + + +def test_get_glossary_category_rest_bad_request( + request_type=business_glossary.GetGlossaryCategoryRequest, +): + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/glossaries/sample3/categories/sample4" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.get_glossary_category(request) + + +@pytest.mark.parametrize( + "request_type", + [ + business_glossary.GetGlossaryCategoryRequest, + dict, + ], +) +def test_get_glossary_category_rest_call_success(request_type): + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/glossaries/sample3/categories/sample4" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = business_glossary.GlossaryCategory( + name="name_value", + uid="uid_value", + display_name="display_name_value", + description="description_value", + parent="parent_value", + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = business_glossary.GlossaryCategory.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.get_glossary_category(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, business_glossary.GlossaryCategory) + assert response.name == "name_value" + assert response.uid == "uid_value" + assert response.display_name == "display_name_value" + assert response.description == "description_value" + assert response.parent == "parent_value" + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_glossary_category_rest_interceptors(null_interceptor): + transport = transports.BusinessGlossaryServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.BusinessGlossaryServiceRestInterceptor(), + ) + client = BusinessGlossaryServiceClient(transport=transport) + + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.BusinessGlossaryServiceRestInterceptor, "post_get_glossary_category" + ) as post, mock.patch.object( + transports.BusinessGlossaryServiceRestInterceptor, + "post_get_glossary_category_with_metadata", + ) as post_with_metadata, mock.patch.object( + transports.BusinessGlossaryServiceRestInterceptor, "pre_get_glossary_category" + ) as 
pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = business_glossary.GetGlossaryCategoryRequest.pb( + business_glossary.GetGlossaryCategoryRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = business_glossary.GlossaryCategory.to_json( + business_glossary.GlossaryCategory() + ) + req.return_value.content = return_value + + request = business_glossary.GetGlossaryCategoryRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = business_glossary.GlossaryCategory() + post_with_metadata.return_value = business_glossary.GlossaryCategory(), metadata + + client.get_glossary_category( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_list_glossary_categories_rest_bad_request( + request_type=business_glossary.ListGlossaryCategoriesRequest, +): + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2/glossaries/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.list_glossary_categories(request) + + +@pytest.mark.parametrize( + "request_type", + [ + business_glossary.ListGlossaryCategoriesRequest, + dict, + ], +) +def test_list_glossary_categories_rest_call_success(request_type): + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2/glossaries/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = business_glossary.ListGlossaryCategoriesResponse( + next_page_token="next_page_token_value", + unreachable_locations=["unreachable_locations_value"], + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = business_glossary.ListGlossaryCategoriesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.list_glossary_categories(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.ListGlossaryCategoriesPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable_locations == ["unreachable_locations_value"] + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_glossary_categories_rest_interceptors(null_interceptor): + transport = transports.BusinessGlossaryServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.BusinessGlossaryServiceRestInterceptor(), + ) + client = BusinessGlossaryServiceClient(transport=transport) + + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.BusinessGlossaryServiceRestInterceptor, + "post_list_glossary_categories", + ) as post, mock.patch.object( + transports.BusinessGlossaryServiceRestInterceptor, + "post_list_glossary_categories_with_metadata", + ) as post_with_metadata, mock.patch.object( + transports.BusinessGlossaryServiceRestInterceptor, + "pre_list_glossary_categories", + ) as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = business_glossary.ListGlossaryCategoriesRequest.pb( + business_glossary.ListGlossaryCategoriesRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = business_glossary.ListGlossaryCategoriesResponse.to_json( + business_glossary.ListGlossaryCategoriesResponse() + ) + req.return_value.content = return_value + + request = business_glossary.ListGlossaryCategoriesRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + 
post.return_value = business_glossary.ListGlossaryCategoriesResponse() + post_with_metadata.return_value = ( + business_glossary.ListGlossaryCategoriesResponse(), + metadata, + ) + + client.list_glossary_categories( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_create_glossary_term_rest_bad_request( + request_type=business_glossary.CreateGlossaryTermRequest, +): + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2/glossaries/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.create_glossary_term(request) + + +@pytest.mark.parametrize( + "request_type", + [ + business_glossary.CreateGlossaryTermRequest, + dict, + ], +) +def test_create_glossary_term_rest_call_success(request_type): + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2/glossaries/sample3"} + request_init["term"] = { + "name": "name_value", + "uid": "uid_value", + "display_name": "display_name_value", + "description": "description_value", + "create_time": {"seconds": 751, "nanos": 543}, + 
"update_time": {}, + "labels": {}, + "parent": "parent_value", + } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = business_glossary.CreateGlossaryTermRequest.meta.fields["term"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["term"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": 
field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["term"][field])): + del request_init["term"][field][i][subfield] + else: + del request_init["term"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = business_glossary.GlossaryTerm( + name="name_value", + uid="uid_value", + display_name="display_name_value", + description="description_value", + parent="parent_value", + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = business_glossary.GlossaryTerm.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.create_glossary_term(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, business_glossary.GlossaryTerm) + assert response.name == "name_value" + assert response.uid == "uid_value" + assert response.display_name == "display_name_value" + assert response.description == "description_value" + assert response.parent == "parent_value" + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_create_glossary_term_rest_interceptors(null_interceptor): + transport = transports.BusinessGlossaryServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.BusinessGlossaryServiceRestInterceptor(), + ) + client = BusinessGlossaryServiceClient(transport=transport) + + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.BusinessGlossaryServiceRestInterceptor, "post_create_glossary_term" + ) as post, mock.patch.object( + transports.BusinessGlossaryServiceRestInterceptor, + "post_create_glossary_term_with_metadata", + ) as post_with_metadata, mock.patch.object( + transports.BusinessGlossaryServiceRestInterceptor, "pre_create_glossary_term" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = business_glossary.CreateGlossaryTermRequest.pb( + business_glossary.CreateGlossaryTermRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = business_glossary.GlossaryTerm.to_json( + business_glossary.GlossaryTerm() + ) + req.return_value.content = return_value + + request = business_glossary.CreateGlossaryTermRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, 
metadata + post.return_value = business_glossary.GlossaryTerm() + post_with_metadata.return_value = business_glossary.GlossaryTerm(), metadata + + client.create_glossary_term( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_update_glossary_term_rest_bad_request( + request_type=business_glossary.UpdateGlossaryTermRequest, +): + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = { + "term": { + "name": "projects/sample1/locations/sample2/glossaries/sample3/terms/sample4" + } + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.update_glossary_term(request) + + +@pytest.mark.parametrize( + "request_type", + [ + business_glossary.UpdateGlossaryTermRequest, + dict, + ], +) +def test_update_glossary_term_rest_call_success(request_type): + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = { + "term": { + "name": "projects/sample1/locations/sample2/glossaries/sample3/terms/sample4" + } + } + request_init["term"] = { + "name": "projects/sample1/locations/sample2/glossaries/sample3/terms/sample4", + "uid": "uid_value", + "display_name": "display_name_value", + 
"description": "description_value", + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "labels": {}, + "parent": "parent_value", + } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = business_glossary.UpdateGlossaryTermRequest.meta.fields["term"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["term"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, 
subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["term"][field])): + del request_init["term"][field][i][subfield] + else: + del request_init["term"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = business_glossary.GlossaryTerm( + name="name_value", + uid="uid_value", + display_name="display_name_value", + description="description_value", + parent="parent_value", + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = business_glossary.GlossaryTerm.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.update_glossary_term(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, business_glossary.GlossaryTerm) + assert response.name == "name_value" + assert response.uid == "uid_value" + assert response.display_name == "display_name_value" + assert response.description == "description_value" + assert response.parent == "parent_value" + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_update_glossary_term_rest_interceptors(null_interceptor): + transport = transports.BusinessGlossaryServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.BusinessGlossaryServiceRestInterceptor(), + ) + client = BusinessGlossaryServiceClient(transport=transport) + + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.BusinessGlossaryServiceRestInterceptor, "post_update_glossary_term" + ) as post, mock.patch.object( + transports.BusinessGlossaryServiceRestInterceptor, + "post_update_glossary_term_with_metadata", + ) as post_with_metadata, mock.patch.object( + transports.BusinessGlossaryServiceRestInterceptor, "pre_update_glossary_term" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = business_glossary.UpdateGlossaryTermRequest.pb( + business_glossary.UpdateGlossaryTermRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = business_glossary.GlossaryTerm.to_json( + business_glossary.GlossaryTerm() + ) + req.return_value.content = return_value + + request = business_glossary.UpdateGlossaryTermRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, 
metadata + post.return_value = business_glossary.GlossaryTerm() + post_with_metadata.return_value = business_glossary.GlossaryTerm(), metadata + + client.update_glossary_term( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_delete_glossary_term_rest_bad_request( + request_type=business_glossary.DeleteGlossaryTermRequest, +): + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/glossaries/sample3/terms/sample4" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.delete_glossary_term(request) + + +@pytest.mark.parametrize( + "request_type", + [ + business_glossary.DeleteGlossaryTermRequest, + dict, + ], +) +def test_delete_glossary_term_rest_call_success(request_type): + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/glossaries/sample3/terms/sample4" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = None + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = "" + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.delete_glossary_term(request) + + # Establish that the response is the type that we expect. + assert response is None + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_glossary_term_rest_interceptors(null_interceptor): + transport = transports.BusinessGlossaryServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.BusinessGlossaryServiceRestInterceptor(), + ) + client = BusinessGlossaryServiceClient(transport=transport) + + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.BusinessGlossaryServiceRestInterceptor, "pre_delete_glossary_term" + ) as pre: + pre.assert_not_called() + pb_message = business_glossary.DeleteGlossaryTermRequest.pb( + business_glossary.DeleteGlossaryTermRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + request = business_glossary.DeleteGlossaryTermRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + + client.delete_glossary_term( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + 
pre.assert_called_once() + + +def test_get_glossary_term_rest_bad_request( + request_type=business_glossary.GetGlossaryTermRequest, +): + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/glossaries/sample3/terms/sample4" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.get_glossary_term(request) + + +@pytest.mark.parametrize( + "request_type", + [ + business_glossary.GetGlossaryTermRequest, + dict, + ], +) +def test_get_glossary_term_rest_call_success(request_type): + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/glossaries/sample3/terms/sample4" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = business_glossary.GlossaryTerm( + name="name_value", + uid="uid_value", + display_name="display_name_value", + description="description_value", + parent="parent_value", + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = business_glossary.GlossaryTerm.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.get_glossary_term(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, business_glossary.GlossaryTerm) + assert response.name == "name_value" + assert response.uid == "uid_value" + assert response.display_name == "display_name_value" + assert response.description == "description_value" + assert response.parent == "parent_value" + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_glossary_term_rest_interceptors(null_interceptor): + transport = transports.BusinessGlossaryServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.BusinessGlossaryServiceRestInterceptor(), + ) + client = BusinessGlossaryServiceClient(transport=transport) + + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.BusinessGlossaryServiceRestInterceptor, "post_get_glossary_term" + ) as post, mock.patch.object( + transports.BusinessGlossaryServiceRestInterceptor, + "post_get_glossary_term_with_metadata", + ) as post_with_metadata, mock.patch.object( + transports.BusinessGlossaryServiceRestInterceptor, "pre_get_glossary_term" + ) as pre: + pre.assert_not_called() + 
post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = business_glossary.GetGlossaryTermRequest.pb( + business_glossary.GetGlossaryTermRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = business_glossary.GlossaryTerm.to_json( + business_glossary.GlossaryTerm() + ) + req.return_value.content = return_value + + request = business_glossary.GetGlossaryTermRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = business_glossary.GlossaryTerm() + post_with_metadata.return_value = business_glossary.GlossaryTerm(), metadata + + client.get_glossary_term( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_list_glossary_terms_rest_bad_request( + request_type=business_glossary.ListGlossaryTermsRequest, +): + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2/glossaries/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.list_glossary_terms(request) + + +@pytest.mark.parametrize( + "request_type", + [ + business_glossary.ListGlossaryTermsRequest, + dict, + ], +) +def test_list_glossary_terms_rest_call_success(request_type): + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2/glossaries/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = business_glossary.ListGlossaryTermsResponse( + next_page_token="next_page_token_value", + unreachable_locations=["unreachable_locations_value"], + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = business_glossary.ListGlossaryTermsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.list_glossary_terms(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.ListGlossaryTermsPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable_locations == ["unreachable_locations_value"] + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_glossary_terms_rest_interceptors(null_interceptor): + transport = transports.BusinessGlossaryServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.BusinessGlossaryServiceRestInterceptor(), + ) + client = BusinessGlossaryServiceClient(transport=transport) + + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.BusinessGlossaryServiceRestInterceptor, "post_list_glossary_terms" + ) as post, mock.patch.object( + transports.BusinessGlossaryServiceRestInterceptor, + "post_list_glossary_terms_with_metadata", + ) as post_with_metadata, mock.patch.object( + transports.BusinessGlossaryServiceRestInterceptor, "pre_list_glossary_terms" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = business_glossary.ListGlossaryTermsRequest.pb( + business_glossary.ListGlossaryTermsRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = business_glossary.ListGlossaryTermsResponse.to_json( + business_glossary.ListGlossaryTermsResponse() + ) + req.return_value.content = return_value + + request = business_glossary.ListGlossaryTermsRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = 
business_glossary.ListGlossaryTermsResponse() + post_with_metadata.return_value = ( + business_glossary.ListGlossaryTermsResponse(), + metadata, + ) + + client.list_glossary_terms( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_get_location_rest_bad_request(request_type=locations_pb2.GetLocationRequest): + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type() + request = json_format.ParseDict( + {"name": "projects/sample1/locations/sample2"}, request + ) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.get_location(request) + + +@pytest.mark.parametrize( + "request_type", + [ + locations_pb2.GetLocationRequest, + dict, + ], +) +def test_get_location_rest(request_type): + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + request_init = {"name": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = locations_pb2.Location() + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.get_location(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, locations_pb2.Location) + + +def test_list_locations_rest_bad_request( + request_type=locations_pb2.ListLocationsRequest, +): + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type() + request = json_format.ParseDict({"name": "projects/sample1"}, request) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.list_locations(request) + + +@pytest.mark.parametrize( + "request_type", + [ + locations_pb2.ListLocationsRequest, + dict, + ], +) +def test_list_locations_rest(request_type): + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + request_init = {"name": "projects/sample1"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = locations_pb2.ListLocationsResponse() + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.list_locations(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, locations_pb2.ListLocationsResponse) + + +def test_cancel_operation_rest_bad_request( + request_type=operations_pb2.CancelOperationRequest, +): + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type() + request = json_format.ParseDict( + {"name": "projects/sample1/locations/sample2/operations/sample3"}, request + ) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.cancel_operation(request) + + +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.CancelOperationRequest, + dict, + ], +) +def test_cancel_operation_rest(request_type): + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + request_init = {"name": "projects/sample1/locations/sample2/operations/sample3"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, "request") as req: + # Designate an appropriate value for the returned response. + return_value = None + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = "{}" + response_value.content = json_return_value.encode("UTF-8") + + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.cancel_operation(request) + + # Establish that the response is the type that we expect. + assert response is None + + +def test_delete_operation_rest_bad_request( + request_type=operations_pb2.DeleteOperationRequest, +): + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type() + request = json_format.ParseDict( + {"name": "projects/sample1/locations/sample2/operations/sample3"}, request + ) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.delete_operation(request) + + +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.DeleteOperationRequest, + dict, + ], +) +def test_delete_operation_rest(request_type): + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + request_init = {"name": "projects/sample1/locations/sample2/operations/sample3"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, "request") as req: + # Designate an appropriate value for the returned response. + return_value = None + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = "{}" + response_value.content = json_return_value.encode("UTF-8") + + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.delete_operation(request) + + # Establish that the response is the type that we expect. + assert response is None + + +def test_get_operation_rest_bad_request( + request_type=operations_pb2.GetOperationRequest, +): + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type() + request = json_format.ParseDict( + {"name": "projects/sample1/locations/sample2/operations/sample3"}, request + ) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.get_operation(request) + + +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.GetOperationRequest, + dict, + ], +) +def test_get_operation_rest(request_type): + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + request_init = {"name": "projects/sample1/locations/sample2/operations/sample3"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation() + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.get_operation(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.Operation) + + +def test_list_operations_rest_bad_request( + request_type=operations_pb2.ListOperationsRequest, +): + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type() + request = json_format.ParseDict( + {"name": "projects/sample1/locations/sample2"}, request + ) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.list_operations(request) + + +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.ListOperationsRequest, + dict, + ], +) +def test_list_operations_rest(request_type): + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + request_init = {"name": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.ListOperationsResponse() + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.list_operations(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.ListOperationsResponse) + + +def test_initialize_client_w_rest(): + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + assert client is not None + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. 
+def test_create_glossary_empty_call_rest(): + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.create_glossary), "__call__") as call: + client.create_glossary(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = business_glossary.CreateGlossaryRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_update_glossary_empty_call_rest(): + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.update_glossary), "__call__") as call: + client.update_glossary(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = business_glossary.UpdateGlossaryRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_delete_glossary_empty_call_rest(): + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.delete_glossary), "__call__") as call: + client.delete_glossary(request=None) + + # Establish that the underlying stub method was called. 
+ call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = business_glossary.DeleteGlossaryRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_get_glossary_empty_call_rest(): + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.get_glossary), "__call__") as call: + client.get_glossary(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = business_glossary.GetGlossaryRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_list_glossaries_empty_call_rest(): + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.list_glossaries), "__call__") as call: + client.list_glossaries(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = business_glossary.ListGlossariesRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_create_glossary_category_empty_call_rest(): + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. 
+ with mock.patch.object( + type(client.transport.create_glossary_category), "__call__" + ) as call: + client.create_glossary_category(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = business_glossary.CreateGlossaryCategoryRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_update_glossary_category_empty_call_rest(): + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.update_glossary_category), "__call__" + ) as call: + client.update_glossary_category(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = business_glossary.UpdateGlossaryCategoryRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_delete_glossary_category_empty_call_rest(): + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.delete_glossary_category), "__call__" + ) as call: + client.delete_glossary_category(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = business_glossary.DeleteGlossaryCategoryRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. 
+def test_get_glossary_category_empty_call_rest(): + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_glossary_category), "__call__" + ) as call: + client.get_glossary_category(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = business_glossary.GetGlossaryCategoryRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_list_glossary_categories_empty_call_rest(): + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_glossary_categories), "__call__" + ) as call: + client.list_glossary_categories(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = business_glossary.ListGlossaryCategoriesRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_create_glossary_term_empty_call_rest(): + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.create_glossary_term), "__call__" + ) as call: + client.create_glossary_term(request=None) + + # Establish that the underlying stub method was called. 
+ call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = business_glossary.CreateGlossaryTermRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_update_glossary_term_empty_call_rest(): + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.update_glossary_term), "__call__" + ) as call: + client.update_glossary_term(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = business_glossary.UpdateGlossaryTermRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_delete_glossary_term_empty_call_rest(): + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.delete_glossary_term), "__call__" + ) as call: + client.delete_glossary_term(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = business_glossary.DeleteGlossaryTermRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_get_glossary_term_empty_call_rest(): + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. 
+ with mock.patch.object( + type(client.transport.get_glossary_term), "__call__" + ) as call: + client.get_glossary_term(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = business_glossary.GetGlossaryTermRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_list_glossary_terms_empty_call_rest(): + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_glossary_terms), "__call__" + ) as call: + client.list_glossary_terms(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = business_glossary.ListGlossaryTermsRequest() + + assert args[0] == request_msg + + +def test_business_glossary_service_rest_lro_client(): + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + transport = client.transport + + # Ensure that we have an api-core operations client. + assert isinstance( + transport.operations_client, + operations_v1.AbstractOperationsClient, + ) + + # Ensure that subsequent calls to the property send the exact same object. + assert transport.operations_client is transport.operations_client + + +def test_transport_grpc_default(): + # A client should use the gRPC transport by default. 
+ client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert isinstance( + client.transport, + transports.BusinessGlossaryServiceGrpcTransport, + ) + + +def test_business_glossary_service_base_transport_error(): + # Passing both a credentials object and credentials_file should raise an error + with pytest.raises(core_exceptions.DuplicateCredentialArgs): + transport = transports.BusinessGlossaryServiceTransport( + credentials=ga_credentials.AnonymousCredentials(), + credentials_file="credentials.json", + ) + + +def test_business_glossary_service_base_transport(): + # Instantiate the base transport. + with mock.patch( + "google.cloud.dataplex_v1.services.business_glossary_service.transports.BusinessGlossaryServiceTransport.__init__" + ) as Transport: + Transport.return_value = None + transport = transports.BusinessGlossaryServiceTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Every method on the transport should just blindly + # raise NotImplementedError. 
+ methods = ( + "create_glossary", + "update_glossary", + "delete_glossary", + "get_glossary", + "list_glossaries", + "create_glossary_category", + "update_glossary_category", + "delete_glossary_category", + "get_glossary_category", + "list_glossary_categories", + "create_glossary_term", + "update_glossary_term", + "delete_glossary_term", + "get_glossary_term", + "list_glossary_terms", + "get_location", + "list_locations", + "get_operation", + "cancel_operation", + "delete_operation", + "list_operations", + ) + for method in methods: + with pytest.raises(NotImplementedError): + getattr(transport, method)(request=object()) + + with pytest.raises(NotImplementedError): + transport.close() + + # Additionally, the LRO client (a property) should + # also raise NotImplementedError + with pytest.raises(NotImplementedError): + transport.operations_client + + # Catch all for all remaining methods and properties + remainder = [ + "kind", + ] + for r in remainder: + with pytest.raises(NotImplementedError): + getattr(transport, r)() + + +def test_business_glossary_service_base_transport_with_credentials_file(): + # Instantiate the base transport with a credentials file + with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch( + "google.cloud.dataplex_v1.services.business_glossary_service.transports.BusinessGlossaryServiceTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.BusinessGlossaryServiceTransport( + credentials_file="credentials.json", + quota_project_id="octopus", + ) + load_creds.assert_called_once_with( + "credentials.json", + scopes=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id="octopus", + ) + + +def test_business_glossary_service_base_transport_with_adc(): + # Test the default credentials are used if credentials and 
credentials_file are None. + with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch( + "google.cloud.dataplex_v1.services.business_glossary_service.transports.BusinessGlossaryServiceTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.BusinessGlossaryServiceTransport() + adc.assert_called_once() + + +def test_business_glossary_service_auth_adc(): + # If no credentials are provided, we should use ADC credentials. + with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + BusinessGlossaryServiceClient() + adc.assert_called_once_with( + scopes=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id=None, + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.BusinessGlossaryServiceGrpcTransport, + transports.BusinessGlossaryServiceGrpcAsyncIOTransport, + ], +) +def test_business_glossary_service_transport_auth_adc(transport_class): + # If credentials and host are not provided, the transport class should use + # ADC credentials. 
+ with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + adc.assert_called_once_with( + scopes=["1", "2"], + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id="octopus", + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.BusinessGlossaryServiceGrpcTransport, + transports.BusinessGlossaryServiceGrpcAsyncIOTransport, + transports.BusinessGlossaryServiceRestTransport, + ], +) +def test_business_glossary_service_transport_auth_gdch_credentials(transport_class): + host = "https://language.com" + api_audience_tests = [None, "https://language2.com"] + api_audience_expect = [host, "https://language2.com"] + for t, e in zip(api_audience_tests, api_audience_expect): + with mock.patch.object(google.auth, "default", autospec=True) as adc: + gdch_mock = mock.MagicMock() + type(gdch_mock).with_gdch_audience = mock.PropertyMock( + return_value=gdch_mock + ) + adc.return_value = (gdch_mock, None) + transport_class(host=host, api_audience=t) + gdch_mock.with_gdch_audience.assert_called_once_with(e) + + +@pytest.mark.parametrize( + "transport_class,grpc_helpers", + [ + (transports.BusinessGlossaryServiceGrpcTransport, grpc_helpers), + (transports.BusinessGlossaryServiceGrpcAsyncIOTransport, grpc_helpers_async), + ], +) +def test_business_glossary_service_transport_create_channel( + transport_class, grpc_helpers +): + # If credentials and host are not provided, the transport class should use + # ADC credentials. 
+ with mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel", autospec=True + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + adc.return_value = (creds, None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + + create_channel.assert_called_with( + "dataplex.googleapis.com:443", + credentials=creds, + credentials_file=None, + quota_project_id="octopus", + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + scopes=["1", "2"], + default_host="dataplex.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.BusinessGlossaryServiceGrpcTransport, + transports.BusinessGlossaryServiceGrpcAsyncIOTransport, + ], +) +def test_business_glossary_service_grpc_transport_client_cert_source_for_mtls( + transport_class, +): + cred = ga_credentials.AnonymousCredentials() + + # Check ssl_channel_credentials is used if provided. + with mock.patch.object(transport_class, "create_channel") as mock_create_channel: + mock_ssl_channel_creds = mock.Mock() + transport_class( + host="squid.clam.whelk", + credentials=cred, + ssl_channel_credentials=mock_ssl_channel_creds, + ) + mock_create_channel.assert_called_once_with( + "squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_channel_creds, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls + # is used. 
+ with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()): + with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred: + transport_class( + credentials=cred, + client_cert_source_for_mtls=client_cert_source_callback, + ) + expected_cert, expected_key = client_cert_source_callback() + mock_ssl_cred.assert_called_once_with( + certificate_chain=expected_cert, private_key=expected_key + ) + + +def test_business_glossary_service_http_transport_client_cert_source_for_mtls(): + cred = ga_credentials.AnonymousCredentials() + with mock.patch( + "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel" + ) as mock_configure_mtls_channel: + transports.BusinessGlossaryServiceRestTransport( + credentials=cred, client_cert_source_for_mtls=client_cert_source_callback + ) + mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback) + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "grpc_asyncio", + "rest", + ], +) +def test_business_glossary_service_host_no_port(transport_name): + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions( + api_endpoint="dataplex.googleapis.com" + ), + transport=transport_name, + ) + assert client.transport._host == ( + "dataplex.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://dataplex.googleapis.com" + ) + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "grpc_asyncio", + "rest", + ], +) +def test_business_glossary_service_host_with_port(transport_name): + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions( + api_endpoint="dataplex.googleapis.com:8000" + ), + transport=transport_name, + ) + assert client.transport._host == ( + "dataplex.googleapis.com:8000" + if transport_name in ["grpc", "grpc_asyncio"] + else 
"https://dataplex.googleapis.com:8000" + ) + + +@pytest.mark.parametrize( + "transport_name", + [ + "rest", + ], +) +def test_business_glossary_service_client_transport_session_collision(transport_name): + creds1 = ga_credentials.AnonymousCredentials() + creds2 = ga_credentials.AnonymousCredentials() + client1 = BusinessGlossaryServiceClient( + credentials=creds1, + transport=transport_name, + ) + client2 = BusinessGlossaryServiceClient( + credentials=creds2, + transport=transport_name, + ) + session1 = client1.transport.create_glossary._session + session2 = client2.transport.create_glossary._session + assert session1 != session2 + session1 = client1.transport.update_glossary._session + session2 = client2.transport.update_glossary._session + assert session1 != session2 + session1 = client1.transport.delete_glossary._session + session2 = client2.transport.delete_glossary._session + assert session1 != session2 + session1 = client1.transport.get_glossary._session + session2 = client2.transport.get_glossary._session + assert session1 != session2 + session1 = client1.transport.list_glossaries._session + session2 = client2.transport.list_glossaries._session + assert session1 != session2 + session1 = client1.transport.create_glossary_category._session + session2 = client2.transport.create_glossary_category._session + assert session1 != session2 + session1 = client1.transport.update_glossary_category._session + session2 = client2.transport.update_glossary_category._session + assert session1 != session2 + session1 = client1.transport.delete_glossary_category._session + session2 = client2.transport.delete_glossary_category._session + assert session1 != session2 + session1 = client1.transport.get_glossary_category._session + session2 = client2.transport.get_glossary_category._session + assert session1 != session2 + session1 = client1.transport.list_glossary_categories._session + session2 = client2.transport.list_glossary_categories._session + assert session1 != session2 + 
session1 = client1.transport.create_glossary_term._session + session2 = client2.transport.create_glossary_term._session + assert session1 != session2 + session1 = client1.transport.update_glossary_term._session + session2 = client2.transport.update_glossary_term._session + assert session1 != session2 + session1 = client1.transport.delete_glossary_term._session + session2 = client2.transport.delete_glossary_term._session + assert session1 != session2 + session1 = client1.transport.get_glossary_term._session + session2 = client2.transport.get_glossary_term._session + assert session1 != session2 + session1 = client1.transport.list_glossary_terms._session + session2 = client2.transport.list_glossary_terms._session + assert session1 != session2 + + +def test_business_glossary_service_grpc_transport_channel(): + channel = grpc.secure_channel("http://localhost/", grpc.local_channel_credentials()) + + # Check that channel is used if provided. + transport = transports.BusinessGlossaryServiceGrpcTransport( + host="squid.clam.whelk", + channel=channel, + ) + assert transport.grpc_channel == channel + assert transport._host == "squid.clam.whelk:443" + assert transport._ssl_channel_credentials == None + + +def test_business_glossary_service_grpc_asyncio_transport_channel(): + channel = aio.secure_channel("http://localhost/", grpc.local_channel_credentials()) + + # Check that channel is used if provided. + transport = transports.BusinessGlossaryServiceGrpcAsyncIOTransport( + host="squid.clam.whelk", + channel=channel, + ) + assert transport.grpc_channel == channel + assert transport._host == "squid.clam.whelk:443" + assert transport._ssl_channel_credentials == None + + +# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are +# removed from grpc/grpc_asyncio transport constructor. 
@pytest.mark.parametrize(
    "transport_class",
    [
        transports.BusinessGlossaryServiceGrpcTransport,
        transports.BusinessGlossaryServiceGrpcAsyncIOTransport,
    ],
)
def test_business_glossary_service_transport_channel_mtls_with_client_cert_source(
    transport_class,
):
    # Patch both the SSL-credential factory and channel creation so no real
    # gRPC machinery is exercised; we only inspect how the transport wires
    # up mTLS when given the deprecated client_cert_source argument.
    with mock.patch(
        "grpc.ssl_channel_credentials", autospec=True
    ) as ssl_cred_factory, mock.patch.object(
        transport_class, "create_channel"
    ) as channel_factory:
        fake_ssl_creds = mock.Mock()
        fake_channel = mock.Mock()
        ssl_cred_factory.return_value = fake_ssl_creds
        channel_factory.return_value = fake_channel

        anon_creds = ga_credentials.AnonymousCredentials()
        # The deprecated api_mtls_endpoint/client_cert_source arguments must
        # still function, but are expected to raise a DeprecationWarning.
        with pytest.warns(DeprecationWarning):
            with mock.patch.object(google.auth, "default") as adc:
                adc.return_value = (anon_creds, None)
                transport = transport_class(
                    host="squid.clam.whelk",
                    api_mtls_endpoint="mtls.squid.clam.whelk",
                    client_cert_source=client_cert_source_callback,
                )
                adc.assert_called_once()

        # The cert/key pair produced by the callback must be forwarded to
        # grpc.ssl_channel_credentials ...
        ssl_cred_factory.assert_called_once_with(
            certificate_chain=b"cert bytes", private_key=b"key bytes"
        )
        # ... and the resulting SSL credentials must be used to open a
        # channel against the mTLS endpoint (not the plain host).
        channel_factory.assert_called_once_with(
            "mtls.squid.clam.whelk:443",
            credentials=anon_creds,
            credentials_file=None,
            scopes=None,
            ssl_credentials=fake_ssl_creds,
            quota_project_id=None,
            options=[
                ("grpc.max_send_message_length", -1),
                ("grpc.max_receive_message_length", -1),
            ],
        )
        assert transport.grpc_channel == fake_channel
        assert transport._ssl_channel_credentials == fake_ssl_creds


# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are
# removed from grpc/grpc_asyncio transport constructor.
+@pytest.mark.parametrize( + "transport_class", + [ + transports.BusinessGlossaryServiceGrpcTransport, + transports.BusinessGlossaryServiceGrpcAsyncIOTransport, + ], +) +def test_business_glossary_service_transport_channel_mtls_with_adc(transport_class): + mock_ssl_cred = mock.Mock() + with mock.patch.multiple( + "google.auth.transport.grpc.SslCredentials", + __init__=mock.Mock(return_value=None), + ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), + ): + with mock.patch.object( + transport_class, "create_channel" + ) as grpc_create_channel: + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + mock_cred = mock.Mock() + + with pytest.warns(DeprecationWarning): + transport = transport_class( + host="squid.clam.whelk", + credentials=mock_cred, + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=None, + ) + + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=mock_cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + + +def test_business_glossary_service_grpc_lro_client(): + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + transport = client.transport + + # Ensure that we have a api-core operations client. + assert isinstance( + transport.operations_client, + operations_v1.OperationsClient, + ) + + # Ensure that subsequent calls to the property send the exact same object. 
+ assert transport.operations_client is transport.operations_client + + +def test_business_glossary_service_grpc_lro_async_client(): + client = BusinessGlossaryServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + transport = client.transport + + # Ensure that we have a api-core operations client. + assert isinstance( + transport.operations_client, + operations_v1.OperationsAsyncClient, + ) + + # Ensure that subsequent calls to the property send the exact same object. + assert transport.operations_client is transport.operations_client + + +def test_glossary_path(): + project = "squid" + location = "clam" + glossary = "whelk" + expected = "projects/{project}/locations/{location}/glossaries/{glossary}".format( + project=project, + location=location, + glossary=glossary, + ) + actual = BusinessGlossaryServiceClient.glossary_path(project, location, glossary) + assert expected == actual + + +def test_parse_glossary_path(): + expected = { + "project": "octopus", + "location": "oyster", + "glossary": "nudibranch", + } + path = BusinessGlossaryServiceClient.glossary_path(**expected) + + # Check that the path construction is reversible. 
+ actual = BusinessGlossaryServiceClient.parse_glossary_path(path) + assert expected == actual + + +def test_glossary_category_path(): + project = "cuttlefish" + location = "mussel" + glossary = "winkle" + glossary_category = "nautilus" + expected = "projects/{project}/locations/{location}/glossaries/{glossary}/categories/{glossary_category}".format( + project=project, + location=location, + glossary=glossary, + glossary_category=glossary_category, + ) + actual = BusinessGlossaryServiceClient.glossary_category_path( + project, location, glossary, glossary_category + ) + assert expected == actual + + +def test_parse_glossary_category_path(): + expected = { + "project": "scallop", + "location": "abalone", + "glossary": "squid", + "glossary_category": "clam", + } + path = BusinessGlossaryServiceClient.glossary_category_path(**expected) + + # Check that the path construction is reversible. + actual = BusinessGlossaryServiceClient.parse_glossary_category_path(path) + assert expected == actual + + +def test_glossary_term_path(): + project = "whelk" + location = "octopus" + glossary = "oyster" + glossary_term = "nudibranch" + expected = "projects/{project}/locations/{location}/glossaries/{glossary}/terms/{glossary_term}".format( + project=project, + location=location, + glossary=glossary, + glossary_term=glossary_term, + ) + actual = BusinessGlossaryServiceClient.glossary_term_path( + project, location, glossary, glossary_term + ) + assert expected == actual + + +def test_parse_glossary_term_path(): + expected = { + "project": "cuttlefish", + "location": "mussel", + "glossary": "winkle", + "glossary_term": "nautilus", + } + path = BusinessGlossaryServiceClient.glossary_term_path(**expected) + + # Check that the path construction is reversible. 
+ actual = BusinessGlossaryServiceClient.parse_glossary_term_path(path) + assert expected == actual + + +def test_common_billing_account_path(): + billing_account = "scallop" + expected = "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) + actual = BusinessGlossaryServiceClient.common_billing_account_path(billing_account) + assert expected == actual + + +def test_parse_common_billing_account_path(): + expected = { + "billing_account": "abalone", + } + path = BusinessGlossaryServiceClient.common_billing_account_path(**expected) + + # Check that the path construction is reversible. + actual = BusinessGlossaryServiceClient.parse_common_billing_account_path(path) + assert expected == actual + + +def test_common_folder_path(): + folder = "squid" + expected = "folders/{folder}".format( + folder=folder, + ) + actual = BusinessGlossaryServiceClient.common_folder_path(folder) + assert expected == actual + + +def test_parse_common_folder_path(): + expected = { + "folder": "clam", + } + path = BusinessGlossaryServiceClient.common_folder_path(**expected) + + # Check that the path construction is reversible. + actual = BusinessGlossaryServiceClient.parse_common_folder_path(path) + assert expected == actual + + +def test_common_organization_path(): + organization = "whelk" + expected = "organizations/{organization}".format( + organization=organization, + ) + actual = BusinessGlossaryServiceClient.common_organization_path(organization) + assert expected == actual + + +def test_parse_common_organization_path(): + expected = { + "organization": "octopus", + } + path = BusinessGlossaryServiceClient.common_organization_path(**expected) + + # Check that the path construction is reversible. 
+ actual = BusinessGlossaryServiceClient.parse_common_organization_path(path) + assert expected == actual + + +def test_common_project_path(): + project = "oyster" + expected = "projects/{project}".format( + project=project, + ) + actual = BusinessGlossaryServiceClient.common_project_path(project) + assert expected == actual + + +def test_parse_common_project_path(): + expected = { + "project": "nudibranch", + } + path = BusinessGlossaryServiceClient.common_project_path(**expected) + + # Check that the path construction is reversible. + actual = BusinessGlossaryServiceClient.parse_common_project_path(path) + assert expected == actual + + +def test_common_location_path(): + project = "cuttlefish" + location = "mussel" + expected = "projects/{project}/locations/{location}".format( + project=project, + location=location, + ) + actual = BusinessGlossaryServiceClient.common_location_path(project, location) + assert expected == actual + + +def test_parse_common_location_path(): + expected = { + "project": "winkle", + "location": "nautilus", + } + path = BusinessGlossaryServiceClient.common_location_path(**expected) + + # Check that the path construction is reversible. 
+ actual = BusinessGlossaryServiceClient.parse_common_location_path(path) + assert expected == actual + + +def test_client_with_default_client_info(): + client_info = gapic_v1.client_info.ClientInfo() + + with mock.patch.object( + transports.BusinessGlossaryServiceTransport, "_prep_wrapped_messages" + ) as prep: + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + with mock.patch.object( + transports.BusinessGlossaryServiceTransport, "_prep_wrapped_messages" + ) as prep: + transport_class = BusinessGlossaryServiceClient.get_transport_class() + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + +def test_delete_operation(transport: str = "grpc"): + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.DeleteOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + response = client.delete_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. 
+ assert response is None + + +@pytest.mark.asyncio +async def test_delete_operation_async(transport: str = "grpc_asyncio"): + client = BusinessGlossaryServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.DeleteOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.delete_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +def test_delete_operation_field_headers(): + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.DeleteOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: + call.return_value = None + + client.delete_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_delete_operation_field_headers_async(): + client = BusinessGlossaryServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.DeleteOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.delete_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +def test_delete_operation_from_dict(): + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + + response = client.delete_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_delete_operation_from_dict_async(): + client = BusinessGlossaryServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.delete_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_cancel_operation(transport: str = "grpc"): + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.CancelOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + response = client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +@pytest.mark.asyncio +async def test_cancel_operation_async(transport: str = "grpc_asyncio"): + client = BusinessGlossaryServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.CancelOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +def test_cancel_operation_field_headers(): + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.CancelOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + call.return_value = None + + client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_cancel_operation_field_headers_async(): + client = BusinessGlossaryServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.CancelOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +def test_cancel_operation_from_dict(): + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + + response = client.cancel_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_cancel_operation_from_dict_async(): + client = BusinessGlossaryServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.cancel_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_get_operation(transport: str = "grpc"): + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.GetOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation() + response = client.get_operation(request) + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.Operation) + + +@pytest.mark.asyncio +async def test_get_operation_async(transport: str = "grpc_asyncio"): + client = BusinessGlossaryServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.GetOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + response = await client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.Operation) + + +def test_get_operation_field_headers(): + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.GetOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + call.return_value = operations_pb2.Operation() + + client.get_operation(request) + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_operation_field_headers_async(): + client = BusinessGlossaryServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.GetOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + await client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +def test_get_operation_from_dict(): + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation() + + response = client.get_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_get_operation_from_dict_async(): + client = BusinessGlossaryServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + response = await client.get_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_list_operations(transport: str = "grpc"): + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.ListOperationsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.ListOperationsResponse() + response = client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.ListOperationsResponse) + + +@pytest.mark.asyncio +async def test_list_operations_async(transport: str = "grpc_asyncio"): + client = BusinessGlossaryServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.ListOperationsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.ListOperationsResponse() + ) + response = await client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.ListOperationsResponse) + + +def test_list_operations_field_headers(): + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.ListOperationsRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + call.return_value = operations_pb2.ListOperationsResponse() + + client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_operations_field_headers_async(): + client = BusinessGlossaryServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.ListOperationsRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.ListOperationsResponse() + ) + await client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +def test_list_operations_from_dict(): + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.ListOperationsResponse() + + response = client.list_operations( + request={ + "name": "locations", + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_list_operations_from_dict_async(): + client = BusinessGlossaryServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.ListOperationsResponse() + ) + response = await client.list_operations( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_list_locations(transport: str = "grpc"): + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. 
+ request = locations_pb2.ListLocationsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_locations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = locations_pb2.ListLocationsResponse() + response = client.list_locations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, locations_pb2.ListLocationsResponse) + + +@pytest.mark.asyncio +async def test_list_locations_async(transport: str = "grpc_asyncio"): + client = BusinessGlossaryServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = locations_pb2.ListLocationsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_locations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + locations_pb2.ListLocationsResponse() + ) + response = await client.list_locations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, locations_pb2.ListLocationsResponse) + + +def test_list_locations_field_headers(): + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = locations_pb2.ListLocationsRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_locations), "__call__") as call: + call.return_value = locations_pb2.ListLocationsResponse() + + client.list_locations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_locations_field_headers_async(): + client = BusinessGlossaryServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = locations_pb2.ListLocationsRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_locations), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + locations_pb2.ListLocationsResponse() + ) + await client.list_locations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +def test_list_locations_from_dict(): + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_locations), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = locations_pb2.ListLocationsResponse() + + response = client.list_locations( + request={ + "name": "locations", + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_list_locations_from_dict_async(): + client = BusinessGlossaryServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_locations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + locations_pb2.ListLocationsResponse() + ) + response = await client.list_locations( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_get_location(transport: str = "grpc"): + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = locations_pb2.GetLocationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_location), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = locations_pb2.Location() + response = client.get_location(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, locations_pb2.Location) + + +@pytest.mark.asyncio +async def test_get_location_async(transport: str = "grpc_asyncio"): + client = BusinessGlossaryServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = locations_pb2.GetLocationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_location), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + locations_pb2.Location() + ) + response = await client.get_location(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, locations_pb2.Location) + + +def test_get_location_field_headers(): + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials() + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = locations_pb2.GetLocationRequest() + request.name = "locations/abc" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_location), "__call__") as call: + call.return_value = locations_pb2.Location() + + client.get_location(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+    _, _, kw = call.mock_calls[0]
+    assert (
+        "x-goog-request-params",
+        "name=locations/abc",
+    ) in kw["metadata"]
+
+
+@pytest.mark.asyncio
+async def test_get_location_field_headers_async():
+    client = BusinessGlossaryServiceAsyncClient(
+        credentials=async_anonymous_credentials()
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = locations_pb2.GetLocationRequest()
+    request.name = "locations/abc"
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.get_location), "__call__") as call:
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            locations_pb2.Location()
+        )
+        await client.get_location(request)
+    # Establish that the underlying gRPC stub method was called.
+    assert len(call.mock_calls) == 1
+    _, args, _ = call.mock_calls[0]
+    assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        "x-goog-request-params",
+        "name=locations/abc",
+    ) in kw["metadata"]
+
+
+def test_get_location_from_dict():
+    client = BusinessGlossaryServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.get_location), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = locations_pb2.Location()
+
+        response = client.get_location(
+            request={
+                "name": "locations/abc",
+            }
+        )
+        call.assert_called()
+
+
+@pytest.mark.asyncio
+async def test_get_location_from_dict_async():
+    client = BusinessGlossaryServiceAsyncClient(
+        credentials=async_anonymous_credentials(),
+    )
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.get_location), "__call__") as call:
+        # Designate an appropriate return value for the call.
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + locations_pb2.Location() + ) + response = await client.get_location( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_transport_close_grpc(): + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="grpc" + ) + with mock.patch.object( + type(getattr(client.transport, "_grpc_channel")), "close" + ) as close: + with client: + close.assert_not_called() + close.assert_called_once() + + +@pytest.mark.asyncio +async def test_transport_close_grpc_asyncio(): + client = BusinessGlossaryServiceAsyncClient( + credentials=async_anonymous_credentials(), transport="grpc_asyncio" + ) + with mock.patch.object( + type(getattr(client.transport, "_grpc_channel")), "close" + ) as close: + async with client: + close.assert_not_called() + close.assert_called_once() + + +def test_transport_close_rest(): + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + with mock.patch.object( + type(getattr(client.transport, "_session")), "close" + ) as close: + with client: + close.assert_not_called() + close.assert_called_once() + + +def test_client_ctx(): + transports = [ + "rest", + "grpc", + ] + for transport in transports: + client = BusinessGlossaryServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + # Test client calls underlying transport. 
+ with mock.patch.object(type(client.transport), "close") as close: + close.assert_not_called() + with client: + pass + close.assert_called() + + +@pytest.mark.parametrize( + "client_class,transport_class", + [ + ( + BusinessGlossaryServiceClient, + transports.BusinessGlossaryServiceGrpcTransport, + ), + ( + BusinessGlossaryServiceAsyncClient, + transports.BusinessGlossaryServiceGrpcAsyncIOTransport, + ), + ], +) +def test_api_key_credentials(client_class, transport_class): + with mock.patch.object( + google.auth._default, "get_api_key_credentials", create=True + ) as get_api_key_credentials: + mock_cred = mock.Mock() + get_api_key_credentials.return_value = mock_cred + options = client_options.ClientOptions() + options.api_key = "api_key" + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=mock_cred, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) diff --git a/packages/google-cloud-dataplex/tests/unit/gapic/dataplex_v1/test_catalog_service.py b/packages/google-cloud-dataplex/tests/unit/gapic/dataplex_v1/test_catalog_service.py index c08cc9306417..87518b65c369 100644 --- a/packages/google-cloud-dataplex/tests/unit/gapic/dataplex_v1/test_catalog_service.py +++ b/packages/google-cloud-dataplex/tests/unit/gapic/dataplex_v1/test_catalog_service.py @@ -11218,13 +11218,85 @@ async def test_cancel_metadata_job_flattened_error_async(): ) -def test_create_entry_type_rest_use_cached_wrapped_rpc(): +@pytest.mark.parametrize( + "request_type", + [ + catalog.CreateEntryLinkRequest, + dict, + ], +) +def test_create_entry_link(request_type, transport: str = "grpc"): + client = 
CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_entry_link), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = catalog.EntryLink( + name="name_value", + entry_link_type="entry_link_type_value", + ) + response = client.create_entry_link(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = catalog.CreateEntryLinkRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, catalog.EntryLink) + assert response.name == "name_value" + assert response.entry_link_type == "entry_link_type_value" + + +def test_create_entry_link_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = catalog.CreateEntryLinkRequest( + parent="parent_value", + entry_link_id="entry_link_id_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_entry_link), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client.create_entry_link(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == catalog.CreateEntryLinkRequest( + parent="parent_value", + entry_link_id="entry_link_id_value", + ) + + +def test_create_entry_link_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: client = CatalogServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport="rest", + transport="grpc", ) # Should wrap all calls on client creation @@ -11232,7 +11304,7 @@ def test_create_entry_type_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.create_entry_type in client._transport._wrapped_methods + assert client._transport.create_entry_link in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() @@ -11240,208 +11312,350 @@ def test_create_entry_type_rest_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.create_entry_type + client._transport.create_entry_link ] = mock_rpc - request = {} - client.create_entry_type(request) + client.create_entry_link(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.create_entry_type(request) + client.create_entry_link(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_create_entry_type_rest_required_fields( - request_type=catalog.CreateEntryTypeRequest, +@pytest.mark.asyncio +async def test_create_entry_link_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", ): - transport_class = transports.CatalogServiceRestTransport - - request_init = {} - request_init["parent"] = "" - request_init["entry_type_id"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads( - json_format.MessageToJson(pb_request, use_integers_for_enums=False) - ) + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = CatalogServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) - # verify fields with default values are dropped - assert "entryTypeId" not in jsonified_request + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).create_entry_type._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) + # Ensure method has been cached + assert ( + client._client._transport.create_entry_link + in client._client._transport._wrapped_methods + ) - # verify required fields with default values are now present - assert "entryTypeId" in jsonified_request - assert jsonified_request["entryTypeId"] == request_init["entry_type_id"] + # Replace cached 
wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.create_entry_link + ] = mock_rpc - jsonified_request["parent"] = "parent_value" - jsonified_request["entryTypeId"] = "entry_type_id_value" + request = {} + await client.create_entry_link(request) - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).create_entry_type._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set( - ( - "entry_type_id", - "validate_only", - ) - ) - jsonified_request.update(unset_fields) + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 - # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == "parent_value" - assert "entryTypeId" in jsonified_request - assert jsonified_request["entryTypeId"] == "entry_type_id_value" + await client.create_entry_link(request) - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request = request_type(**request_init) + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. 
- with mock.patch.object(path_template, "transcode") as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - "uri": "v1/sample_method", - "method": "post", - "query_params": pb_request, - } - transcode_result["body"] = pb_request - transcode.return_value = transcode_result - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) +@pytest.mark.asyncio +async def test_create_entry_link_async( + transport: str = "grpc_asyncio", request_type=catalog.CreateEntryLinkRequest +): + client = CatalogServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() - response = client.create_entry_type(request) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_entry_link), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + catalog.EntryLink( + name="name_value", + entry_link_type="entry_link_type_value", + ) + ) + response = await client.create_entry_link(request) - expected_params = [ - ( - "entryTypeId", - "", - ), - ("$alt", "json;enum-encoding=int"), - ] - actual_params = req.call_args.kwargs["params"] - assert expected_params == actual_params + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = catalog.CreateEntryLinkRequest() + assert args[0] == request + # Establish that the response is the type that we expect. + assert isinstance(response, catalog.EntryLink) + assert response.name == "name_value" + assert response.entry_link_type == "entry_link_type_value" -def test_create_entry_type_rest_unset_required_fields(): - transport = transports.CatalogServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials - ) - unset_fields = transport.create_entry_type._get_unset_required_fields({}) - assert set(unset_fields) == ( - set( - ( - "entryTypeId", - "validateOnly", - ) - ) - & set( - ( - "parent", - "entryTypeId", - "entryType", - ) - ) - ) +@pytest.mark.asyncio +async def test_create_entry_link_async_from_dict(): + await test_create_entry_link_async(request_type=dict) -def test_create_entry_type_rest_flattened(): +def test_create_entry_link_field_headers(): client = CatalogServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport="rest", ) - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = catalog.CreateEntryLinkRequest() - # get arguments that satisfy an http rule for this method - sample_request = {"parent": "projects/sample1/locations/sample2"} + request.parent = "parent_value" - # get truthy value for each flattened field - mock_args = dict( - parent="parent_value", - entry_type=catalog.EntryType(name="name_value"), - entry_type_id="entry_type_id_value", - ) - mock_args.update(sample_request) + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.create_entry_link), "__call__" + ) as call: + call.return_value = catalog.EntryLink() + client.create_entry_link(request) - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request - client.create_entry_type(**mock_args) + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_create_entry_link_field_headers_async(): + client = CatalogServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = catalog.CreateEntryLinkRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_entry_link), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(catalog.EntryLink()) + await client.create_entry_link(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_create_entry_link_flattened(): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_entry_link), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = catalog.EntryLink() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.create_entry_link( + parent="parent_value", + entry_link=catalog.EntryLink(name="name_value"), + entry_link_id="entry_link_id_value", + ) # Establish that the underlying call was made with the expected # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/v1/{parent=projects/*/locations/*}/entryTypes" % client.transport._host, - args[1], - ) + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].entry_link + mock_val = catalog.EntryLink(name="name_value") + assert arg == mock_val + arg = args[0].entry_link_id + mock_val = "entry_link_id_value" + assert arg == mock_val -def test_create_entry_type_rest_flattened_error(transport: str = "rest"): +def test_create_entry_link_flattened_error(): client = CatalogServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, ) # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.create_entry_type( - catalog.CreateEntryTypeRequest(), + client.create_entry_link( + catalog.CreateEntryLinkRequest(), parent="parent_value", - entry_type=catalog.EntryType(name="name_value"), - entry_type_id="entry_type_id_value", + entry_link=catalog.EntryLink(name="name_value"), + entry_link_id="entry_link_id_value", ) -def test_update_entry_type_rest_use_cached_wrapped_rpc(): +@pytest.mark.asyncio +async def test_create_entry_link_flattened_async(): + client = CatalogServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_entry_link), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = catalog.EntryLink() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(catalog.EntryLink()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.create_entry_link( + parent="parent_value", + entry_link=catalog.EntryLink(name="name_value"), + entry_link_id="entry_link_id_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].entry_link + mock_val = catalog.EntryLink(name="name_value") + assert arg == mock_val + arg = args[0].entry_link_id + mock_val = "entry_link_id_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_create_entry_link_flattened_error_async(): + client = CatalogServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.create_entry_link( + catalog.CreateEntryLinkRequest(), + parent="parent_value", + entry_link=catalog.EntryLink(name="name_value"), + entry_link_id="entry_link_id_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + catalog.DeleteEntryLinkRequest, + dict, + ], +) +def test_delete_entry_link(request_type, transport: str = "grpc"): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_entry_link), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = catalog.EntryLink( + name="name_value", + entry_link_type="entry_link_type_value", + ) + response = client.delete_entry_link(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = catalog.DeleteEntryLinkRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, catalog.EntryLink) + assert response.name == "name_value" + assert response.entry_link_type == "entry_link_type_value" + + +def test_delete_entry_link_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. 
+ request = catalog.DeleteEntryLinkRequest( + name="name_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_entry_link), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.delete_entry_link(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == catalog.DeleteEntryLinkRequest( + name="name_value", + ) + + +def test_delete_entry_link_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: client = CatalogServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport="rest", + transport="grpc", ) # Should wrap all calls on client creation @@ -11449,7 +11663,7 @@ def test_update_entry_type_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.update_entry_type in client._transport._wrapped_methods + assert client._transport.delete_entry_link in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() @@ -11457,192 +11671,326 @@ def test_update_entry_type_rest_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.update_entry_type + client._transport.delete_entry_link ] = mock_rpc - request = {} - client.update_entry_type(request) + client.delete_entry_link(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + client.delete_entry_link(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_delete_entry_link_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = CatalogServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 wrapper_fn.reset_mock() - client.update_entry_type(request) + # Ensure method has been cached + assert ( + client._client._transport.delete_entry_link + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.delete_entry_link + ] = mock_rpc + + request = {} + await client.delete_entry_link(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + await client.delete_entry_link(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_update_entry_type_rest_required_fields( - request_type=catalog.UpdateEntryTypeRequest, +@pytest.mark.asyncio +async def test_delete_entry_link_async( + transport: str = "grpc_asyncio", request_type=catalog.DeleteEntryLinkRequest ): - transport_class = transports.CatalogServiceRestTransport - - request_init = {} - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads( - json_format.MessageToJson(pb_request, use_integers_for_enums=False) + client = CatalogServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, ) - # verify fields with default values are dropped + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).update_entry_type._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_entry_link), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + catalog.EntryLink( + name="name_value", + entry_link_type="entry_link_type_value", + ) + ) + response = await client.delete_entry_link(request) - # verify required fields with default values are now present + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = catalog.DeleteEntryLinkRequest() + assert args[0] == request - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).update_entry_type._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set( - ( - "update_mask", - "validate_only", - ) - ) - jsonified_request.update(unset_fields) + # Establish that the response is the type that we expect. + assert isinstance(response, catalog.EntryLink) + assert response.name == "name_value" + assert response.entry_link_type == "entry_link_type_value" + + +@pytest.mark.asyncio +async def test_delete_entry_link_async_from_dict(): + await test_delete_entry_link_async(request_type=dict) - # verify required fields with non-default values are left alone +def test_delete_entry_link_field_headers(): client = CatalogServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport="rest", ) - request = request_type(**request_init) - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, "transcode") as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
- pb_request = request_type.pb(request) - transcode_result = { - "uri": "v1/sample_method", - "method": "patch", - "query_params": pb_request, - } - transcode_result["body"] = pb_request - transcode.return_value = transcode_result + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = catalog.DeleteEntryLinkRequest() - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) + request.name = "name_value" - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_entry_link), "__call__" + ) as call: + call.return_value = catalog.EntryLink() + client.delete_entry_link(request) - response = client.update_entry_type(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request - expected_params = [("$alt", "json;enum-encoding=int")] - actual_params = req.call_args.kwargs["params"] - assert expected_params == actual_params + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] -def test_update_entry_type_rest_unset_required_fields(): - transport = transports.CatalogServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials +@pytest.mark.asyncio +async def test_delete_entry_link_field_headers_async(): + client = CatalogServiceAsyncClient( + credentials=async_anonymous_credentials(), ) - unset_fields = transport.update_entry_type._get_unset_required_fields({}) - assert set(unset_fields) == ( - set( - ( - "updateMask", - "validateOnly", - ) - ) - & set( - ( - "entryType", - "updateMask", - ) - ) - ) + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = catalog.DeleteEntryLinkRequest() + + request.name = "name_value" + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_entry_link), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(catalog.EntryLink()) + await client.delete_entry_link(request) -def test_update_entry_type_rest_flattened(): + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_delete_entry_link_flattened(): client = CatalogServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport="rest", ) - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.delete_entry_link), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = catalog.EntryLink() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.delete_entry_link( + name="name_value", + ) - # get arguments that satisfy an http rule for this method - sample_request = { - "entry_type": { - "name": "projects/sample1/locations/sample2/entryTypes/sample3" - } - } + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val - # get truthy value for each flattened field - mock_args = dict( - entry_type=catalog.EntryType(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + +def test_delete_entry_link_flattened_error(): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_entry_link( + catalog.DeleteEntryLinkRequest(), + name="name_value", ) - mock_args.update(sample_request) - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.update_entry_type(**mock_args) +@pytest.mark.asyncio +async def test_delete_entry_link_flattened_async(): + client = CatalogServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.delete_entry_link), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = catalog.EntryLink() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(catalog.EntryLink()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.delete_entry_link( + name="name_value", + ) # Establish that the underlying call was made with the expected # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/v1/{entry_type.name=projects/*/locations/*/entryTypes/*}" - % client.transport._host, - args[1], + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_delete_entry_link_flattened_error_async(): + client = CatalogServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.delete_entry_link( + catalog.DeleteEntryLinkRequest(), + name="name_value", ) -def test_update_entry_type_rest_flattened_error(transport: str = "rest"): +@pytest.mark.parametrize( + "request_type", + [ + catalog.GetEntryLinkRequest, + dict, + ], +) +def test_get_entry_link(request_type, transport: str = "grpc"): client = CatalogServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.update_entry_type( - catalog.UpdateEntryTypeRequest(), - entry_type=catalog.EntryType(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_entry_link), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = catalog.EntryLink( + name="name_value", + entry_link_type="entry_link_type_value", ) + response = client.get_entry_link(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = catalog.GetEntryLinkRequest() + assert args[0] == request -def test_delete_entry_type_rest_use_cached_wrapped_rpc(): + # Establish that the response is the type that we expect. + assert isinstance(response, catalog.EntryLink) + assert response.name == "name_value" + assert response.entry_link_type == "entry_link_type_value" + + +def test_get_entry_link_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = catalog.GetEntryLinkRequest( + name="name_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.get_entry_link), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.get_entry_link(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == catalog.GetEntryLinkRequest( + name="name_value", + ) + + +def test_get_entry_link_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: client = CatalogServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport="rest", + transport="grpc", ) # Should wrap all calls on client creation @@ -11650,175 +11998,250 @@ def test_delete_entry_type_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.delete_entry_type in client._transport._wrapped_methods + assert client._transport.get_entry_link in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.delete_entry_type - ] = mock_rpc - + client._transport._wrapped_methods[client._transport.get_entry_link] = mock_rpc request = {} - client.delete_entry_type(request) + client.get_entry_link(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.delete_entry_type(request) + client.get_entry_link(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_delete_entry_type_rest_required_fields( - request_type=catalog.DeleteEntryTypeRequest, +@pytest.mark.asyncio +async def test_get_entry_link_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", ): - transport_class = transports.CatalogServiceRestTransport - - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads( - json_format.MessageToJson(pb_request, use_integers_for_enums=False) - ) + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = CatalogServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) - # verify fields with default values are dropped + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).delete_entry_type._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) + # Ensure method has been cached + assert ( + client._client._transport.get_entry_link + in client._client._transport._wrapped_methods + ) - # verify required fields with default values are now present + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.get_entry_link + ] = mock_rpc - 
jsonified_request["name"] = "name_value" + request = {} + await client.get_entry_link(request) - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).delete_entry_type._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("etag",)) - jsonified_request.update(unset_fields) + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.get_entry_link(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == "name_value" +@pytest.mark.asyncio +async def test_get_entry_link_async( + transport: str = "grpc_asyncio", request_type=catalog.GetEntryLinkRequest +): + client = CatalogServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_entry_link), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + catalog.EntryLink( + name="name_value", + entry_link_type="entry_link_type_value", + ) + ) + response = await client.get_entry_link(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = catalog.GetEntryLinkRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, catalog.EntryLink) + assert response.name == "name_value" + assert response.entry_link_type == "entry_link_type_value" + + +@pytest.mark.asyncio +async def test_get_entry_link_async_from_dict(): + await test_get_entry_link_async(request_type=dict) + + +def test_get_entry_link_field_headers(): client = CatalogServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport="rest", ) - request = request_type(**request_init) - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, "transcode") as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - "uri": "v1/sample_method", - "method": "delete", - "query_params": pb_request, - } - transcode.return_value = transcode_result + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = catalog.GetEntryLinkRequest() - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) + request.name = "name_value" - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.get_entry_link), "__call__") as call: + call.return_value = catalog.EntryLink() + client.get_entry_link(request) - response = client.delete_entry_type(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request - expected_params = [("$alt", "json;enum-encoding=int")] - actual_params = req.call_args.kwargs["params"] - assert expected_params == actual_params + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] -def test_delete_entry_type_rest_unset_required_fields(): - transport = transports.CatalogServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials +@pytest.mark.asyncio +async def test_get_entry_link_field_headers_async(): + client = CatalogServiceAsyncClient( + credentials=async_anonymous_credentials(), ) - unset_fields = transport.delete_entry_type._get_unset_required_fields({}) - assert set(unset_fields) == (set(("etag",)) & set(("name",))) + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = catalog.GetEntryLinkRequest() + + request.name = "name_value" + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_entry_link), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(catalog.EntryLink()) + await client.get_entry_link(request) -def test_delete_entry_type_rest_flattened(): + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_get_entry_link_flattened(): client = CatalogServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport="rest", ) - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_entry_link), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = catalog.EntryLink() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_entry_link( + name="name_value", + ) - # get arguments that satisfy an http rule for this method - sample_request = { - "name": "projects/sample1/locations/sample2/entryTypes/sample3" - } + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val - # get truthy value for each flattened field - mock_args = dict( + +def test_get_entry_link_flattened_error(): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.get_entry_link( + catalog.GetEntryLinkRequest(), name="name_value", ) - mock_args.update(sample_request) - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.delete_entry_type(**mock_args) +@pytest.mark.asyncio +async def test_get_entry_link_flattened_async(): + client = CatalogServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_entry_link), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = catalog.EntryLink() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(catalog.EntryLink()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.get_entry_link( + name="name_value", + ) # Establish that the underlying call was made with the expected # request object values. 
- assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/v1/{name=projects/*/locations/*/entryTypes/*}" % client.transport._host, - args[1], - ) + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val -def test_delete_entry_type_rest_flattened_error(transport: str = "rest"): - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, +@pytest.mark.asyncio +async def test_get_entry_link_flattened_error_async(): + client = CatalogServiceAsyncClient( + credentials=async_anonymous_credentials(), ) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.delete_entry_type( - catalog.DeleteEntryTypeRequest(), + await client.get_entry_link( + catalog.GetEntryLinkRequest(), name="name_value", ) -def test_list_entry_types_rest_use_cached_wrapped_rpc(): +def test_create_entry_type_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -11832,7 +12255,7 @@ def test_list_entry_types_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.list_entry_types in client._transport._wrapped_methods + assert client._transport.create_entry_type in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() @@ -11840,29 +12263,34 @@ def test_list_entry_types_rest_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. 
) client._transport._wrapped_methods[ - client._transport.list_entry_types + client._transport.create_entry_type ] = mock_rpc request = {} - client.list_entry_types(request) + client.create_entry_type(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.list_entry_types(request) + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.create_entry_type(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_list_entry_types_rest_required_fields( - request_type=catalog.ListEntryTypesRequest, +def test_create_entry_type_rest_required_fields( + request_type=catalog.CreateEntryTypeRequest, ): transport_class = transports.CatalogServiceRestTransport request_init = {} request_init["parent"] = "" + request_init["entry_type_id"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -11870,26 +12298,28 @@ def test_list_entry_types_rest_required_fields( ) # verify fields with default values are dropped + assert "entryTypeId" not in jsonified_request unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).list_entry_types._get_unset_required_fields(jsonified_request) + ).create_entry_type._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present + assert "entryTypeId" in jsonified_request + assert jsonified_request["entryTypeId"] == request_init["entry_type_id"] jsonified_request["parent"] = "parent_value" + jsonified_request["entryTypeId"] = "entry_type_id_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).list_entry_types._get_unset_required_fields(jsonified_request) + 
).create_entry_type._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. assert not set(unset_fields) - set( ( - "filter", - "order_by", - "page_size", - "page_token", + "entry_type_id", + "validate_only", ) ) jsonified_request.update(unset_fields) @@ -11897,6 +12327,8 @@ def test_list_entry_types_rest_required_fields( # verify required fields with non-default values are left alone assert "parent" in jsonified_request assert jsonified_request["parent"] == "parent_value" + assert "entryTypeId" in jsonified_request + assert jsonified_request["entryTypeId"] == "entry_type_id_value" client = CatalogServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -11905,7 +12337,7 @@ def test_list_entry_types_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = catalog.ListEntryTypesResponse() + return_value = operations_pb2.Operation(name="operations/spam") # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -11917,49 +12349,57 @@ def test_list_entry_types_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "get", + "method": "post", "query_params": pb_request, } + transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = catalog.ListEntryTypesResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.list_entry_types(request) - - expected_params = [("$alt", "json;enum-encoding=int")] - actual_params = req.call_args.kwargs["params"] - assert expected_params == actual_params - + response = client.create_entry_type(request) -def test_list_entry_types_rest_unset_required_fields(): + expected_params = [ + ( + "entryTypeId", + "", + ), + ("$alt", "json;enum-encoding=int"), + ] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_create_entry_type_rest_unset_required_fields(): transport = transports.CatalogServiceRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.list_entry_types._get_unset_required_fields({}) + unset_fields = transport.create_entry_type._get_unset_required_fields({}) assert set(unset_fields) == ( set( ( - "filter", - "orderBy", - "pageSize", - "pageToken", + "entryTypeId", + "validateOnly", + ) + ) + & set( + ( + "parent", + "entryTypeId", + "entryType", ) ) - & set(("parent",)) ) -def test_list_entry_types_rest_flattened(): +def test_create_entry_type_rest_flattened(): client = CatalogServiceClient( 
credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -11968,7 +12408,7 @@ def test_list_entry_types_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = catalog.ListEntryTypesResponse() + return_value = operations_pb2.Operation(name="operations/spam") # get arguments that satisfy an http rule for this method sample_request = {"parent": "projects/sample1/locations/sample2"} @@ -11976,20 +12416,20 @@ def test_list_entry_types_rest_flattened(): # get truthy value for each flattened field mock_args = dict( parent="parent_value", + entry_type=catalog.EntryType(name="name_value"), + entry_type_id="entry_type_id_value", ) mock_args.update(sample_request) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - # Convert return value to protobuf type - return_value = catalog.ListEntryTypesResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.list_entry_types(**mock_args) + client.create_entry_type(**mock_args) # Establish that the underlying call was made with the expected # request object values. @@ -12001,7 +12441,7 @@ def test_list_entry_types_rest_flattened(): ) -def test_list_entry_types_rest_flattened_error(transport: str = "rest"): +def test_create_entry_type_rest_flattened_error(transport: str = "rest"): client = CatalogServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -12010,74 +12450,15 @@ def test_list_entry_types_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.list_entry_types( - catalog.ListEntryTypesRequest(), + client.create_entry_type( + catalog.CreateEntryTypeRequest(), parent="parent_value", + entry_type=catalog.EntryType(name="name_value"), + entry_type_id="entry_type_id_value", ) -def test_list_entry_types_rest_pager(transport: str = "rest"): - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # TODO(kbandes): remove this mock unless there's a good reason for it. - # with mock.patch.object(path_template, 'transcode') as transcode: - # Set the response as a series of pages - response = ( - catalog.ListEntryTypesResponse( - entry_types=[ - catalog.EntryType(), - catalog.EntryType(), - catalog.EntryType(), - ], - next_page_token="abc", - ), - catalog.ListEntryTypesResponse( - entry_types=[], - next_page_token="def", - ), - catalog.ListEntryTypesResponse( - entry_types=[ - catalog.EntryType(), - ], - next_page_token="ghi", - ), - catalog.ListEntryTypesResponse( - entry_types=[ - catalog.EntryType(), - catalog.EntryType(), - ], - ), - ) - # Two responses for two calls - response = response + response - - # Wrap the values into proper Response objs - response = tuple(catalog.ListEntryTypesResponse.to_json(x) for x in response) - return_values = tuple(Response() for i in response) - for return_val, response_val in zip(return_values, response): - return_val._content = response_val.encode("UTF-8") - return_val.status_code = 200 - req.side_effect = return_values - - sample_request = {"parent": "projects/sample1/locations/sample2"} - - pager = client.list_entry_types(request=sample_request) - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, catalog.EntryType) for i in results) - - pages = list(client.list_entry_types(request=sample_request).pages) - for page_, token in 
zip(pages, ["abc", "def", "ghi", ""]): - assert page_.raw_page.next_page_token == token - - -def test_get_entry_type_rest_use_cached_wrapped_rpc(): +def test_update_entry_type_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -12091,33 +12472,40 @@ def test_get_entry_type_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.get_entry_type in client._transport._wrapped_methods + assert client._transport.update_entry_type in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.get_entry_type] = mock_rpc + client._transport._wrapped_methods[ + client._transport.update_entry_type + ] = mock_rpc request = {} - client.get_entry_type(request) + client.update_entry_type(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.get_entry_type(request) + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.update_entry_type(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_get_entry_type_rest_required_fields(request_type=catalog.GetEntryTypeRequest): +def test_update_entry_type_rest_required_fields( + request_type=catalog.UpdateEntryTypeRequest, +): transport_class = transports.CatalogServiceRestTransport request_init = {} - request_init["name"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -12128,21 +12516,24 @@ def test_get_entry_type_rest_required_fields(request_type=catalog.GetEntryTypeRe unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).get_entry_type._get_unset_required_fields(jsonified_request) + ).update_entry_type._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["name"] = "name_value" - unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).get_entry_type._get_unset_required_fields(jsonified_request) + ).update_entry_type._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set( + ( + "update_mask", + "validate_only", + ) + ) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == "name_value" client = CatalogServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -12151,7 +12542,7 @@ def test_get_entry_type_rest_required_fields(request_type=catalog.GetEntryTypeRe request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = catalog.EntryType() + return_value = operations_pb2.Operation(name="operations/spam") # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -12163,39 +12554,50 @@ def test_get_entry_type_rest_required_fields(request_type=catalog.GetEntryTypeRe pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "get", + "method": "patch", "query_params": pb_request, } + transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = catalog.EntryType.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.get_entry_type(request) + response = client.update_entry_type(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_get_entry_type_rest_unset_required_fields(): +def test_update_entry_type_rest_unset_required_fields(): transport = transports.CatalogServiceRestTransport( 
credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.get_entry_type._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name",))) + unset_fields = transport.update_entry_type._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "updateMask", + "validateOnly", + ) + ) + & set( + ( + "entryType", + "updateMask", + ) + ) + ) -def test_get_entry_type_rest_flattened(): +def test_update_entry_type_rest_flattened(): client = CatalogServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -12204,42 +12606,44 @@ def test_get_entry_type_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = catalog.EntryType() + return_value = operations_pb2.Operation(name="operations/spam") # get arguments that satisfy an http rule for this method sample_request = { - "name": "projects/sample1/locations/sample2/entryTypes/sample3" + "entry_type": { + "name": "projects/sample1/locations/sample2/entryTypes/sample3" + } } # get truthy value for each flattened field mock_args = dict( - name="name_value", + entry_type=catalog.EntryType(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) mock_args.update(sample_request) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - # Convert return value to protobuf type - return_value = catalog.EntryType.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.get_entry_type(**mock_args) + client.update_entry_type(**mock_args) # Establish that the underlying call was made with the expected # 
request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{name=projects/*/locations/*/entryTypes/*}" % client.transport._host, + "%s/v1/{entry_type.name=projects/*/locations/*/entryTypes/*}" + % client.transport._host, args[1], ) -def test_get_entry_type_rest_flattened_error(transport: str = "rest"): +def test_update_entry_type_rest_flattened_error(transport: str = "rest"): client = CatalogServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -12248,13 +12652,14 @@ def test_get_entry_type_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.get_entry_type( - catalog.GetEntryTypeRequest(), - name="name_value", + client.update_entry_type( + catalog.UpdateEntryTypeRequest(), + entry_type=catalog.EntryType(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) -def test_create_aspect_type_rest_use_cached_wrapped_rpc(): +def test_delete_entry_type_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -12268,9 +12673,7 @@ def test_create_aspect_type_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert ( - client._transport.create_aspect_type in client._transport._wrapped_methods - ) + assert client._transport.delete_entry_type in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() @@ -12278,11 +12681,11 @@ def test_create_aspect_type_rest_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. 
) client._transport._wrapped_methods[ - client._transport.create_aspect_type + client._transport.delete_entry_type ] = mock_rpc request = {} - client.create_aspect_type(request) + client.delete_entry_type(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 @@ -12291,21 +12694,20 @@ def test_create_aspect_type_rest_use_cached_wrapped_rpc(): # subsequent calls should use the cached wrapper wrapper_fn.reset_mock() - client.create_aspect_type(request) + client.delete_entry_type(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_create_aspect_type_rest_required_fields( - request_type=catalog.CreateAspectTypeRequest, +def test_delete_entry_type_rest_required_fields( + request_type=catalog.DeleteEntryTypeRequest, ): transport_class = transports.CatalogServiceRestTransport request_init = {} - request_init["parent"] = "" - request_init["aspect_type_id"] = "" + request_init["name"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -12313,37 +12715,26 @@ def test_create_aspect_type_rest_required_fields( ) # verify fields with default values are dropped - assert "aspectTypeId" not in jsonified_request unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).create_aspect_type._get_unset_required_fields(jsonified_request) + ).delete_entry_type._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - assert "aspectTypeId" in jsonified_request - assert jsonified_request["aspectTypeId"] == request_init["aspect_type_id"] - jsonified_request["parent"] = "parent_value" - jsonified_request["aspectTypeId"] = "aspect_type_id_value" + jsonified_request["name"] = "name_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - 
).create_aspect_type._get_unset_required_fields(jsonified_request) + ).delete_entry_type._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set( - ( - "aspect_type_id", - "validate_only", - ) - ) + assert not set(unset_fields) - set(("etag",)) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == "parent_value" - assert "aspectTypeId" in jsonified_request - assert jsonified_request["aspectTypeId"] == "aspect_type_id_value" + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" client = CatalogServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -12364,10 +12755,9 @@ def test_create_aspect_type_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "post", + "method": "delete", "query_params": pb_request, } - transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() @@ -12378,43 +12768,23 @@ def test_create_aspect_type_rest_required_fields( req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.create_aspect_type(request) + response = client.delete_entry_type(request) - expected_params = [ - ( - "aspectTypeId", - "", - ), - ("$alt", "json;enum-encoding=int"), - ] + expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_create_aspect_type_rest_unset_required_fields(): +def test_delete_entry_type_rest_unset_required_fields(): transport = transports.CatalogServiceRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.create_aspect_type._get_unset_required_fields({}) - assert 
set(unset_fields) == ( - set( - ( - "aspectTypeId", - "validateOnly", - ) - ) - & set( - ( - "parent", - "aspectTypeId", - "aspectType", - ) - ) - ) + unset_fields = transport.delete_entry_type._get_unset_required_fields({}) + assert set(unset_fields) == (set(("etag",)) & set(("name",))) -def test_create_aspect_type_rest_flattened(): +def test_delete_entry_type_rest_flattened(): client = CatalogServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -12426,13 +12796,13 @@ def test_create_aspect_type_rest_flattened(): return_value = operations_pb2.Operation(name="operations/spam") # get arguments that satisfy an http rule for this method - sample_request = {"parent": "projects/sample1/locations/sample2"} + sample_request = { + "name": "projects/sample1/locations/sample2/entryTypes/sample3" + } # get truthy value for each flattened field mock_args = dict( - parent="parent_value", - aspect_type=catalog.AspectType(name="name_value"), - aspect_type_id="aspect_type_id_value", + name="name_value", ) mock_args.update(sample_request) @@ -12444,20 +12814,19 @@ def test_create_aspect_type_rest_flattened(): req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.create_aspect_type(**mock_args) + client.delete_entry_type(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{parent=projects/*/locations/*}/aspectTypes" - % client.transport._host, + "%s/v1/{name=projects/*/locations/*/entryTypes/*}" % client.transport._host, args[1], ) -def test_create_aspect_type_rest_flattened_error(transport: str = "rest"): +def test_delete_entry_type_rest_flattened_error(transport: str = "rest"): client = CatalogServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -12466,15 +12835,13 @@ def test_create_aspect_type_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.create_aspect_type( - catalog.CreateAspectTypeRequest(), - parent="parent_value", - aspect_type=catalog.AspectType(name="name_value"), - aspect_type_id="aspect_type_id_value", + client.delete_entry_type( + catalog.DeleteEntryTypeRequest(), + name="name_value", ) -def test_update_aspect_type_rest_use_cached_wrapped_rpc(): +def test_list_entry_types_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -12488,9 +12855,7 @@ def test_update_aspect_type_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert ( - client._transport.update_aspect_type in client._transport._wrapped_methods - ) + assert client._transport.list_entry_types in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() @@ -12498,32 +12863,29 @@ def test_update_aspect_type_rest_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. 
) client._transport._wrapped_methods[ - client._transport.update_aspect_type + client._transport.list_entry_types ] = mock_rpc request = {} - client.update_aspect_type(request) + client.list_entry_types(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.update_aspect_type(request) + client.list_entry_types(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_update_aspect_type_rest_required_fields( - request_type=catalog.UpdateAspectTypeRequest, +def test_list_entry_types_rest_required_fields( + request_type=catalog.ListEntryTypesRequest, ): transport_class = transports.CatalogServiceRestTransport request_init = {} + request_init["parent"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -12534,24 +12896,30 @@ def test_update_aspect_type_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).update_aspect_type._get_unset_required_fields(jsonified_request) + ).list_entry_types._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present + jsonified_request["parent"] = "parent_value" + unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).update_aspect_type._get_unset_required_fields(jsonified_request) + ).list_entry_types._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. 
assert not set(unset_fields) - set( ( - "update_mask", - "validate_only", + "filter", + "order_by", + "page_size", + "page_token", ) ) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" client = CatalogServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -12560,7 +12928,7 @@ def test_update_aspect_type_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") + return_value = catalog.ListEntryTypesResponse() # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -12572,50 +12940,49 @@ def test_update_aspect_type_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "patch", + "method": "get", "query_params": pb_request, } - transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = catalog.ListEntryTypesResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.update_aspect_type(request) + response = client.list_entry_types(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_update_aspect_type_rest_unset_required_fields(): +def test_list_entry_types_rest_unset_required_fields(): transport = 
transports.CatalogServiceRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.update_aspect_type._get_unset_required_fields({}) + unset_fields = transport.list_entry_types._get_unset_required_fields({}) assert set(unset_fields) == ( set( ( - "updateMask", - "validateOnly", - ) - ) - & set( - ( - "aspectType", - "updateMask", + "filter", + "orderBy", + "pageSize", + "pageToken", ) ) + & set(("parent",)) ) -def test_update_aspect_type_rest_flattened(): +def test_list_entry_types_rest_flattened(): client = CatalogServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -12624,44 +12991,40 @@ def test_update_aspect_type_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") + return_value = catalog.ListEntryTypesResponse() # get arguments that satisfy an http rule for this method - sample_request = { - "aspect_type": { - "name": "projects/sample1/locations/sample2/aspectTypes/sample3" - } - } + sample_request = {"parent": "projects/sample1/locations/sample2"} # get truthy value for each flattened field mock_args = dict( - aspect_type=catalog.AspectType(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + parent="parent_value", ) mock_args.update(sample_request) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 + # Convert return value to protobuf type + return_value = catalog.ListEntryTypesResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.update_aspect_type(**mock_args) + 
client.list_entry_types(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{aspect_type.name=projects/*/locations/*/aspectTypes/*}" - % client.transport._host, + "%s/v1/{parent=projects/*/locations/*}/entryTypes" % client.transport._host, args[1], ) -def test_update_aspect_type_rest_flattened_error(transport: str = "rest"): +def test_list_entry_types_rest_flattened_error(transport: str = "rest"): client = CatalogServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -12670,14 +13033,74 @@ def test_update_aspect_type_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.update_aspect_type( - catalog.UpdateAspectTypeRequest(), - aspect_type=catalog.AspectType(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + client.list_entry_types( + catalog.ListEntryTypesRequest(), + parent="parent_value", ) -def test_delete_aspect_type_rest_use_cached_wrapped_rpc(): +def test_list_entry_types_rest_pager(transport: str = "rest"): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + catalog.ListEntryTypesResponse( + entry_types=[ + catalog.EntryType(), + catalog.EntryType(), + catalog.EntryType(), + ], + next_page_token="abc", + ), + catalog.ListEntryTypesResponse( + entry_types=[], + next_page_token="def", + ), + catalog.ListEntryTypesResponse( + entry_types=[ + catalog.EntryType(), + ], + next_page_token="ghi", + ), + catalog.ListEntryTypesResponse( + entry_types=[ + catalog.EntryType(), + catalog.EntryType(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(catalog.ListEntryTypesResponse.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {"parent": "projects/sample1/locations/sample2"} + + pager = client.list_entry_types(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, catalog.EntryType) for i in results) + + pages = list(client.list_entry_types(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +def test_get_entry_type_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -12691,39 +13114,29 @@ def test_delete_aspect_type_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert ( - client._transport.delete_aspect_type in client._transport._wrapped_methods - ) + assert client._transport.get_entry_type in 
client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.delete_aspect_type - ] = mock_rpc + client._transport._wrapped_methods[client._transport.get_entry_type] = mock_rpc request = {} - client.delete_aspect_type(request) + client.get_entry_type(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.delete_aspect_type(request) + client.get_entry_type(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_delete_aspect_type_rest_required_fields( - request_type=catalog.DeleteAspectTypeRequest, -): +def test_get_entry_type_rest_required_fields(request_type=catalog.GetEntryTypeRequest): transport_class = transports.CatalogServiceRestTransport request_init = {} @@ -12738,7 +13151,7 @@ def test_delete_aspect_type_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).delete_aspect_type._get_unset_required_fields(jsonified_request) + ).get_entry_type._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present @@ -12747,9 +13160,7 @@ def test_delete_aspect_type_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).delete_aspect_type._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. 
- assert not set(unset_fields) - set(("etag",)) + ).get_entry_type._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone @@ -12763,7 +13174,7 @@ def test_delete_aspect_type_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") + return_value = catalog.EntryType() # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -12775,36 +13186,39 @@ def test_delete_aspect_type_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "delete", + "method": "get", "query_params": pb_request, } transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = catalog.EntryType.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.delete_aspect_type(request) + response = client.get_entry_type(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_delete_aspect_type_rest_unset_required_fields(): +def test_get_entry_type_rest_unset_required_fields(): transport = transports.CatalogServiceRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.delete_aspect_type._get_unset_required_fields({}) - assert set(unset_fields) == (set(("etag",)) & set(("name",))) + unset_fields = transport.get_entry_type._get_unset_required_fields({}) + 
assert set(unset_fields) == (set(()) & set(("name",))) -def test_delete_aspect_type_rest_flattened(): +def test_get_entry_type_rest_flattened(): client = CatalogServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -12813,11 +13227,11 @@ def test_delete_aspect_type_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") + return_value = catalog.EntryType() # get arguments that satisfy an http rule for this method sample_request = { - "name": "projects/sample1/locations/sample2/aspectTypes/sample3" + "name": "projects/sample1/locations/sample2/entryTypes/sample3" } # get truthy value for each flattened field @@ -12829,25 +13243,26 @@ def test_delete_aspect_type_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 + # Convert return value to protobuf type + return_value = catalog.EntryType.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.delete_aspect_type(**mock_args) + client.get_entry_type(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{name=projects/*/locations/*/aspectTypes/*}" - % client.transport._host, + "%s/v1/{name=projects/*/locations/*/entryTypes/*}" % client.transport._host, args[1], ) -def test_delete_aspect_type_rest_flattened_error(transport: str = "rest"): +def test_get_entry_type_rest_flattened_error(transport: str = "rest"): client = CatalogServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -12856,13 +13271,13 @@ def test_delete_aspect_type_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.delete_aspect_type( - catalog.DeleteAspectTypeRequest(), - name="name_value", + client.get_entry_type( + catalog.GetEntryTypeRequest(), + name="name_value", ) -def test_list_aspect_types_rest_use_cached_wrapped_rpc(): +def test_create_aspect_type_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -12876,7 +13291,9 @@ def test_list_aspect_types_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.list_aspect_types in client._transport._wrapped_methods + assert ( + client._transport.create_aspect_type in client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.Mock() @@ -12884,29 +13301,34 @@ def test_list_aspect_types_rest_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. 
) client._transport._wrapped_methods[ - client._transport.list_aspect_types + client._transport.create_aspect_type ] = mock_rpc request = {} - client.list_aspect_types(request) + client.create_aspect_type(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.list_aspect_types(request) + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.create_aspect_type(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_list_aspect_types_rest_required_fields( - request_type=catalog.ListAspectTypesRequest, +def test_create_aspect_type_rest_required_fields( + request_type=catalog.CreateAspectTypeRequest, ): transport_class = transports.CatalogServiceRestTransport request_init = {} request_init["parent"] = "" + request_init["aspect_type_id"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -12914,26 +13336,28 @@ def test_list_aspect_types_rest_required_fields( ) # verify fields with default values are dropped + assert "aspectTypeId" not in jsonified_request unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).list_aspect_types._get_unset_required_fields(jsonified_request) + ).create_aspect_type._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present + assert "aspectTypeId" in jsonified_request + assert jsonified_request["aspectTypeId"] == request_init["aspect_type_id"] jsonified_request["parent"] = "parent_value" + jsonified_request["aspectTypeId"] = "aspect_type_id_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).list_aspect_types._get_unset_required_fields(jsonified_request) + 
).create_aspect_type._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. assert not set(unset_fields) - set( ( - "filter", - "order_by", - "page_size", - "page_token", + "aspect_type_id", + "validate_only", ) ) jsonified_request.update(unset_fields) @@ -12941,6 +13365,8 @@ def test_list_aspect_types_rest_required_fields( # verify required fields with non-default values are left alone assert "parent" in jsonified_request assert jsonified_request["parent"] == "parent_value" + assert "aspectTypeId" in jsonified_request + assert jsonified_request["aspectTypeId"] == "aspect_type_id_value" client = CatalogServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -12949,7 +13375,7 @@ def test_list_aspect_types_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = catalog.ListAspectTypesResponse() + return_value = operations_pb2.Operation(name="operations/spam") # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -12961,49 +13387,57 @@ def test_list_aspect_types_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "get", + "method": "post", "query_params": pb_request, } + transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = catalog.ListAspectTypesResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.list_aspect_types(request) + response = client.create_aspect_type(request) - expected_params = [("$alt", "json;enum-encoding=int")] + expected_params = [ + ( + "aspectTypeId", + "", + ), + ("$alt", "json;enum-encoding=int"), + ] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_list_aspect_types_rest_unset_required_fields(): +def test_create_aspect_type_rest_unset_required_fields(): transport = transports.CatalogServiceRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.list_aspect_types._get_unset_required_fields({}) + unset_fields = transport.create_aspect_type._get_unset_required_fields({}) assert set(unset_fields) == ( set( ( - "filter", - "orderBy", - "pageSize", - "pageToken", + "aspectTypeId", + "validateOnly", + ) + ) + & set( + ( + "parent", + "aspectTypeId", + "aspectType", ) ) - & set(("parent",)) ) -def test_list_aspect_types_rest_flattened(): +def test_create_aspect_type_rest_flattened(): client = CatalogServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -13012,7 +13446,7 @@ def 
test_list_aspect_types_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = catalog.ListAspectTypesResponse() + return_value = operations_pb2.Operation(name="operations/spam") # get arguments that satisfy an http rule for this method sample_request = {"parent": "projects/sample1/locations/sample2"} @@ -13020,20 +13454,20 @@ def test_list_aspect_types_rest_flattened(): # get truthy value for each flattened field mock_args = dict( parent="parent_value", + aspect_type=catalog.AspectType(name="name_value"), + aspect_type_id="aspect_type_id_value", ) mock_args.update(sample_request) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - # Convert return value to protobuf type - return_value = catalog.ListAspectTypesResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.list_aspect_types(**mock_args) + client.create_aspect_type(**mock_args) # Establish that the underlying call was made with the expected # request object values. @@ -13046,7 +13480,7 @@ def test_list_aspect_types_rest_flattened(): ) -def test_list_aspect_types_rest_flattened_error(transport: str = "rest"): +def test_create_aspect_type_rest_flattened_error(transport: str = "rest"): client = CatalogServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -13055,74 +13489,15 @@ def test_list_aspect_types_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.list_aspect_types( - catalog.ListAspectTypesRequest(), + client.create_aspect_type( + catalog.CreateAspectTypeRequest(), parent="parent_value", + aspect_type=catalog.AspectType(name="name_value"), + aspect_type_id="aspect_type_id_value", ) -def test_list_aspect_types_rest_pager(transport: str = "rest"): - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # TODO(kbandes): remove this mock unless there's a good reason for it. - # with mock.patch.object(path_template, 'transcode') as transcode: - # Set the response as a series of pages - response = ( - catalog.ListAspectTypesResponse( - aspect_types=[ - catalog.AspectType(), - catalog.AspectType(), - catalog.AspectType(), - ], - next_page_token="abc", - ), - catalog.ListAspectTypesResponse( - aspect_types=[], - next_page_token="def", - ), - catalog.ListAspectTypesResponse( - aspect_types=[ - catalog.AspectType(), - ], - next_page_token="ghi", - ), - catalog.ListAspectTypesResponse( - aspect_types=[ - catalog.AspectType(), - catalog.AspectType(), - ], - ), - ) - # Two responses for two calls - response = response + response - - # Wrap the values into proper Response objs - response = tuple(catalog.ListAspectTypesResponse.to_json(x) for x in response) - return_values = tuple(Response() for i in response) - for return_val, response_val in zip(return_values, response): - return_val._content = response_val.encode("UTF-8") - return_val.status_code = 200 - req.side_effect = return_values - - sample_request = {"parent": "projects/sample1/locations/sample2"} - - pager = client.list_aspect_types(request=sample_request) - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, catalog.AspectType) for i in results) - - pages = 
list(client.list_aspect_types(request=sample_request).pages) - for page_, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page_.raw_page.next_page_token == token - - -def test_get_aspect_type_rest_use_cached_wrapped_rpc(): +def test_update_aspect_type_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -13136,35 +13511,42 @@ def test_get_aspect_type_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.get_aspect_type in client._transport._wrapped_methods + assert ( + client._transport.update_aspect_type in client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.get_aspect_type] = mock_rpc + client._transport._wrapped_methods[ + client._transport.update_aspect_type + ] = mock_rpc request = {} - client.get_aspect_type(request) + client.update_aspect_type(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.get_aspect_type(request) + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.update_aspect_type(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_get_aspect_type_rest_required_fields( - request_type=catalog.GetAspectTypeRequest, +def test_update_aspect_type_rest_required_fields( + request_type=catalog.UpdateAspectTypeRequest, ): transport_class = transports.CatalogServiceRestTransport request_init = {} - request_init["name"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -13175,21 +13557,24 @@ def test_get_aspect_type_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).get_aspect_type._get_unset_required_fields(jsonified_request) + ).update_aspect_type._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["name"] = "name_value" - unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).get_aspect_type._get_unset_required_fields(jsonified_request) + ).update_aspect_type._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set( + ( + "update_mask", + "validate_only", + ) + ) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == "name_value" client = CatalogServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -13198,7 +13583,7 @@ def test_get_aspect_type_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = catalog.AspectType() + return_value = operations_pb2.Operation(name="operations/spam") # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -13210,39 +13595,50 @@ def test_get_aspect_type_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "get", + "method": "patch", "query_params": pb_request, } + transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = catalog.AspectType.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.get_aspect_type(request) + response = client.update_aspect_type(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_get_aspect_type_rest_unset_required_fields(): +def test_update_aspect_type_rest_unset_required_fields(): transport = transports.CatalogServiceRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = 
transport.get_aspect_type._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name",))) + unset_fields = transport.update_aspect_type._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "updateMask", + "validateOnly", + ) + ) + & set( + ( + "aspectType", + "updateMask", + ) + ) + ) -def test_get_aspect_type_rest_flattened(): +def test_update_aspect_type_rest_flattened(): client = CatalogServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -13251,43 +13647,44 @@ def test_get_aspect_type_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = catalog.AspectType() + return_value = operations_pb2.Operation(name="operations/spam") # get arguments that satisfy an http rule for this method sample_request = { - "name": "projects/sample1/locations/sample2/aspectTypes/sample3" + "aspect_type": { + "name": "projects/sample1/locations/sample2/aspectTypes/sample3" + } } # get truthy value for each flattened field mock_args = dict( - name="name_value", + aspect_type=catalog.AspectType(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) mock_args.update(sample_request) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - # Convert return value to protobuf type - return_value = catalog.AspectType.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.get_aspect_type(**mock_args) + client.update_aspect_type(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{name=projects/*/locations/*/aspectTypes/*}" + "%s/v1/{aspect_type.name=projects/*/locations/*/aspectTypes/*}" % client.transport._host, args[1], ) -def test_get_aspect_type_rest_flattened_error(transport: str = "rest"): +def test_update_aspect_type_rest_flattened_error(transport: str = "rest"): client = CatalogServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -13296,13 +13693,14 @@ def test_get_aspect_type_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.get_aspect_type( - catalog.GetAspectTypeRequest(), - name="name_value", + client.update_aspect_type( + catalog.UpdateAspectTypeRequest(), + aspect_type=catalog.AspectType(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) -def test_create_entry_group_rest_use_cached_wrapped_rpc(): +def test_delete_aspect_type_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -13317,7 +13715,7 @@ def test_create_entry_group_rest_use_cached_wrapped_rpc(): # Ensure method has been cached assert ( - client._transport.create_entry_group in client._transport._wrapped_methods + client._transport.delete_aspect_type in client._transport._wrapped_methods ) # Replace cached wrapped function with mock @@ -13326,11 +13724,11 @@ def test_create_entry_group_rest_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. 
) client._transport._wrapped_methods[ - client._transport.create_entry_group + client._transport.delete_aspect_type ] = mock_rpc request = {} - client.create_entry_group(request) + client.delete_aspect_type(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 @@ -13339,21 +13737,20 @@ def test_create_entry_group_rest_use_cached_wrapped_rpc(): # subsequent calls should use the cached wrapper wrapper_fn.reset_mock() - client.create_entry_group(request) + client.delete_aspect_type(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_create_entry_group_rest_required_fields( - request_type=catalog.CreateEntryGroupRequest, +def test_delete_aspect_type_rest_required_fields( + request_type=catalog.DeleteAspectTypeRequest, ): transport_class = transports.CatalogServiceRestTransport request_init = {} - request_init["parent"] = "" - request_init["entry_group_id"] = "" + request_init["name"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -13361,37 +13758,26 @@ def test_create_entry_group_rest_required_fields( ) # verify fields with default values are dropped - assert "entryGroupId" not in jsonified_request unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).create_entry_group._get_unset_required_fields(jsonified_request) + ).delete_aspect_type._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - assert "entryGroupId" in jsonified_request - assert jsonified_request["entryGroupId"] == request_init["entry_group_id"] - jsonified_request["parent"] = "parent_value" - jsonified_request["entryGroupId"] = "entry_group_id_value" + jsonified_request["name"] = "name_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() 
- ).create_entry_group._get_unset_required_fields(jsonified_request) + ).delete_aspect_type._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set( - ( - "entry_group_id", - "validate_only", - ) - ) + assert not set(unset_fields) - set(("etag",)) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == "parent_value" - assert "entryGroupId" in jsonified_request - assert jsonified_request["entryGroupId"] == "entry_group_id_value" + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" client = CatalogServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -13412,10 +13798,9 @@ def test_create_entry_group_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "post", + "method": "delete", "query_params": pb_request, } - transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() @@ -13426,43 +13811,23 @@ def test_create_entry_group_rest_required_fields( req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.create_entry_group(request) + response = client.delete_aspect_type(request) - expected_params = [ - ( - "entryGroupId", - "", - ), - ("$alt", "json;enum-encoding=int"), - ] + expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_create_entry_group_rest_unset_required_fields(): +def test_delete_aspect_type_rest_unset_required_fields(): transport = transports.CatalogServiceRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.create_entry_group._get_unset_required_fields({}) - assert 
set(unset_fields) == ( - set( - ( - "entryGroupId", - "validateOnly", - ) - ) - & set( - ( - "parent", - "entryGroupId", - "entryGroup", - ) - ) - ) + unset_fields = transport.delete_aspect_type._get_unset_required_fields({}) + assert set(unset_fields) == (set(("etag",)) & set(("name",))) -def test_create_entry_group_rest_flattened(): +def test_delete_aspect_type_rest_flattened(): client = CatalogServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -13474,13 +13839,13 @@ def test_create_entry_group_rest_flattened(): return_value = operations_pb2.Operation(name="operations/spam") # get arguments that satisfy an http rule for this method - sample_request = {"parent": "projects/sample1/locations/sample2"} + sample_request = { + "name": "projects/sample1/locations/sample2/aspectTypes/sample3" + } # get truthy value for each flattened field mock_args = dict( - parent="parent_value", - entry_group=catalog.EntryGroup(name="name_value"), - entry_group_id="entry_group_id_value", + name="name_value", ) mock_args.update(sample_request) @@ -13492,20 +13857,20 @@ def test_create_entry_group_rest_flattened(): req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.create_entry_group(**mock_args) + client.delete_aspect_type(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{parent=projects/*/locations/*}/entryGroups" + "%s/v1/{name=projects/*/locations/*/aspectTypes/*}" % client.transport._host, args[1], ) -def test_create_entry_group_rest_flattened_error(transport: str = "rest"): +def test_delete_aspect_type_rest_flattened_error(transport: str = "rest"): client = CatalogServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -13514,15 +13879,13 @@ def test_create_entry_group_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.create_entry_group( - catalog.CreateEntryGroupRequest(), - parent="parent_value", - entry_group=catalog.EntryGroup(name="name_value"), - entry_group_id="entry_group_id_value", + client.delete_aspect_type( + catalog.DeleteAspectTypeRequest(), + name="name_value", ) -def test_update_entry_group_rest_use_cached_wrapped_rpc(): +def test_list_aspect_types_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -13536,9 +13899,7 @@ def test_update_entry_group_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert ( - client._transport.update_entry_group in client._transport._wrapped_methods - ) + assert client._transport.list_aspect_types in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() @@ -13546,32 +13907,29 @@ def test_update_entry_group_rest_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. 
) client._transport._wrapped_methods[ - client._transport.update_entry_group + client._transport.list_aspect_types ] = mock_rpc request = {} - client.update_entry_group(request) + client.list_aspect_types(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.update_entry_group(request) + client.list_aspect_types(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_update_entry_group_rest_required_fields( - request_type=catalog.UpdateEntryGroupRequest, +def test_list_aspect_types_rest_required_fields( + request_type=catalog.ListAspectTypesRequest, ): transport_class = transports.CatalogServiceRestTransport request_init = {} + request_init["parent"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -13582,24 +13940,30 @@ def test_update_entry_group_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).update_entry_group._get_unset_required_fields(jsonified_request) + ).list_aspect_types._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present + jsonified_request["parent"] = "parent_value" + unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).update_entry_group._get_unset_required_fields(jsonified_request) + ).list_aspect_types._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. 
assert not set(unset_fields) - set( ( - "update_mask", - "validate_only", + "filter", + "order_by", + "page_size", + "page_token", ) ) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" client = CatalogServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -13608,7 +13972,7 @@ def test_update_entry_group_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") + return_value = catalog.ListAspectTypesResponse() # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -13620,50 +13984,49 @@ def test_update_entry_group_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "patch", + "method": "get", "query_params": pb_request, } - transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = catalog.ListAspectTypesResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.update_entry_group(request) + response = client.list_aspect_types(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_update_entry_group_rest_unset_required_fields(): +def test_list_aspect_types_rest_unset_required_fields(): transport = 
transports.CatalogServiceRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.update_entry_group._get_unset_required_fields({}) + unset_fields = transport.list_aspect_types._get_unset_required_fields({}) assert set(unset_fields) == ( set( ( - "updateMask", - "validateOnly", - ) - ) - & set( - ( - "entryGroup", - "updateMask", + "filter", + "orderBy", + "pageSize", + "pageToken", ) ) + & set(("parent",)) ) -def test_update_entry_group_rest_flattened(): +def test_list_aspect_types_rest_flattened(): client = CatalogServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -13672,44 +14035,41 @@ def test_update_entry_group_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") + return_value = catalog.ListAspectTypesResponse() # get arguments that satisfy an http rule for this method - sample_request = { - "entry_group": { - "name": "projects/sample1/locations/sample2/entryGroups/sample3" - } - } + sample_request = {"parent": "projects/sample1/locations/sample2"} # get truthy value for each flattened field mock_args = dict( - entry_group=catalog.EntryGroup(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + parent="parent_value", ) mock_args.update(sample_request) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 + # Convert return value to protobuf type + return_value = catalog.ListAspectTypesResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.update_entry_group(**mock_args) + 
client.list_aspect_types(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{entry_group.name=projects/*/locations/*/entryGroups/*}" + "%s/v1/{parent=projects/*/locations/*}/aspectTypes" % client.transport._host, args[1], ) -def test_update_entry_group_rest_flattened_error(transport: str = "rest"): +def test_list_aspect_types_rest_flattened_error(transport: str = "rest"): client = CatalogServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -13718,14 +14078,74 @@ def test_update_entry_group_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.update_entry_group( - catalog.UpdateEntryGroupRequest(), - entry_group=catalog.EntryGroup(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + client.list_aspect_types( + catalog.ListAspectTypesRequest(), + parent="parent_value", ) -def test_delete_entry_group_rest_use_cached_wrapped_rpc(): +def test_list_aspect_types_rest_pager(transport: str = "rest"): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + catalog.ListAspectTypesResponse( + aspect_types=[ + catalog.AspectType(), + catalog.AspectType(), + catalog.AspectType(), + ], + next_page_token="abc", + ), + catalog.ListAspectTypesResponse( + aspect_types=[], + next_page_token="def", + ), + catalog.ListAspectTypesResponse( + aspect_types=[ + catalog.AspectType(), + ], + next_page_token="ghi", + ), + catalog.ListAspectTypesResponse( + aspect_types=[ + catalog.AspectType(), + catalog.AspectType(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(catalog.ListAspectTypesResponse.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {"parent": "projects/sample1/locations/sample2"} + + pager = client.list_aspect_types(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, catalog.AspectType) for i in results) + + pages = list(client.list_aspect_types(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +def test_get_aspect_type_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -13739,38 +14159,30 @@ def test_delete_entry_group_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert ( - client._transport.delete_entry_group in client._transport._wrapped_methods - ) + assert client._transport.get_aspect_type in 
client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.delete_entry_group - ] = mock_rpc + client._transport._wrapped_methods[client._transport.get_aspect_type] = mock_rpc request = {} - client.delete_entry_group(request) + client.get_aspect_type(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.delete_entry_group(request) + client.get_aspect_type(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_delete_entry_group_rest_required_fields( - request_type=catalog.DeleteEntryGroupRequest, +def test_get_aspect_type_rest_required_fields( + request_type=catalog.GetAspectTypeRequest, ): transport_class = transports.CatalogServiceRestTransport @@ -13786,7 +14198,7 @@ def test_delete_entry_group_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).delete_entry_group._get_unset_required_fields(jsonified_request) + ).get_aspect_type._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present @@ -13795,9 +14207,7 @@ def test_delete_entry_group_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).delete_entry_group._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. 
- assert not set(unset_fields) - set(("etag",)) + ).get_aspect_type._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone @@ -13811,7 +14221,7 @@ def test_delete_entry_group_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") + return_value = catalog.AspectType() # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -13823,36 +14233,39 @@ def test_delete_entry_group_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "delete", + "method": "get", "query_params": pb_request, } transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = catalog.AspectType.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.delete_entry_group(request) + response = client.get_aspect_type(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_delete_entry_group_rest_unset_required_fields(): +def test_get_aspect_type_rest_unset_required_fields(): transport = transports.CatalogServiceRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.delete_entry_group._get_unset_required_fields({}) - assert set(unset_fields) == (set(("etag",)) & set(("name",))) + unset_fields = 
transport.get_aspect_type._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) -def test_delete_entry_group_rest_flattened(): +def test_get_aspect_type_rest_flattened(): client = CatalogServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -13861,11 +14274,11 @@ def test_delete_entry_group_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") + return_value = catalog.AspectType() # get arguments that satisfy an http rule for this method sample_request = { - "name": "projects/sample1/locations/sample2/entryGroups/sample3" + "name": "projects/sample1/locations/sample2/aspectTypes/sample3" } # get truthy value for each flattened field @@ -13877,25 +14290,27 @@ def test_delete_entry_group_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 + # Convert return value to protobuf type + return_value = catalog.AspectType.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.delete_entry_group(**mock_args) + client.get_aspect_type(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{name=projects/*/locations/*/entryGroups/*}" + "%s/v1/{name=projects/*/locations/*/aspectTypes/*}" % client.transport._host, args[1], ) -def test_delete_entry_group_rest_flattened_error(transport: str = "rest"): +def test_get_aspect_type_rest_flattened_error(transport: str = "rest"): client = CatalogServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -13904,13 +14319,13 @@ def test_delete_entry_group_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.delete_entry_group( - catalog.DeleteEntryGroupRequest(), + client.get_aspect_type( + catalog.GetAspectTypeRequest(), name="name_value", ) -def test_list_entry_groups_rest_use_cached_wrapped_rpc(): +def test_create_entry_group_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -13924,7 +14339,9 @@ def test_list_entry_groups_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.list_entry_groups in client._transport._wrapped_methods + assert ( + client._transport.create_entry_group in client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.Mock() @@ -13932,29 +14349,34 @@ def test_list_entry_groups_rest_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.list_entry_groups + client._transport.create_entry_group ] = mock_rpc request = {} - client.list_entry_groups(request) + client.create_entry_group(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.list_entry_groups(request) + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.create_entry_group(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_list_entry_groups_rest_required_fields( - request_type=catalog.ListEntryGroupsRequest, +def test_create_entry_group_rest_required_fields( + request_type=catalog.CreateEntryGroupRequest, ): transport_class = transports.CatalogServiceRestTransport request_init = {} request_init["parent"] = "" + request_init["entry_group_id"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -13962,26 +14384,28 @@ def test_list_entry_groups_rest_required_fields( ) # verify fields with default values are dropped + assert "entryGroupId" not in jsonified_request unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).list_entry_groups._get_unset_required_fields(jsonified_request) + ).create_entry_group._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present + assert "entryGroupId" in jsonified_request + assert jsonified_request["entryGroupId"] == request_init["entry_group_id"] jsonified_request["parent"] = "parent_value" + jsonified_request["entryGroupId"] = "entry_group_id_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).list_entry_groups._get_unset_required_fields(jsonified_request) + ).create_entry_group._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. 
assert not set(unset_fields) - set( ( - "filter", - "order_by", - "page_size", - "page_token", + "entry_group_id", + "validate_only", ) ) jsonified_request.update(unset_fields) @@ -13989,6 +14413,8 @@ def test_list_entry_groups_rest_required_fields( # verify required fields with non-default values are left alone assert "parent" in jsonified_request assert jsonified_request["parent"] == "parent_value" + assert "entryGroupId" in jsonified_request + assert jsonified_request["entryGroupId"] == "entry_group_id_value" client = CatalogServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -13997,7 +14423,7 @@ def test_list_entry_groups_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = catalog.ListEntryGroupsResponse() + return_value = operations_pb2.Operation(name="operations/spam") # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -14009,49 +14435,57 @@ def test_list_entry_groups_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "get", + "method": "post", "query_params": pb_request, } + transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = catalog.ListEntryGroupsResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.list_entry_groups(request) + response = client.create_entry_group(request) - expected_params = [("$alt", "json;enum-encoding=int")] + expected_params = [ + ( + "entryGroupId", + "", + ), + ("$alt", 
"json;enum-encoding=int"), + ] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_list_entry_groups_rest_unset_required_fields(): +def test_create_entry_group_rest_unset_required_fields(): transport = transports.CatalogServiceRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.list_entry_groups._get_unset_required_fields({}) + unset_fields = transport.create_entry_group._get_unset_required_fields({}) assert set(unset_fields) == ( set( ( - "filter", - "orderBy", - "pageSize", - "pageToken", + "entryGroupId", + "validateOnly", + ) + ) + & set( + ( + "parent", + "entryGroupId", + "entryGroup", ) ) - & set(("parent",)) ) -def test_list_entry_groups_rest_flattened(): +def test_create_entry_group_rest_flattened(): client = CatalogServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -14060,7 +14494,7 @@ def test_list_entry_groups_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = catalog.ListEntryGroupsResponse() + return_value = operations_pb2.Operation(name="operations/spam") # get arguments that satisfy an http rule for this method sample_request = {"parent": "projects/sample1/locations/sample2"} @@ -14068,20 +14502,20 @@ def test_list_entry_groups_rest_flattened(): # get truthy value for each flattened field mock_args = dict( parent="parent_value", + entry_group=catalog.EntryGroup(name="name_value"), + entry_group_id="entry_group_id_value", ) mock_args.update(sample_request) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - # Convert return value to protobuf type - return_value = catalog.ListEntryGroupsResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.list_entry_groups(**mock_args) + client.create_entry_group(**mock_args) # Establish that the underlying call was made with the expected # request object values. @@ -14094,7 +14528,7 @@ def test_list_entry_groups_rest_flattened(): ) -def test_list_entry_groups_rest_flattened_error(transport: str = "rest"): +def test_create_entry_group_rest_flattened_error(transport: str = "rest"): client = CatalogServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -14103,74 +14537,15 @@ def test_list_entry_groups_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.list_entry_groups( - catalog.ListEntryGroupsRequest(), + client.create_entry_group( + catalog.CreateEntryGroupRequest(), parent="parent_value", + entry_group=catalog.EntryGroup(name="name_value"), + entry_group_id="entry_group_id_value", ) -def test_list_entry_groups_rest_pager(transport: str = "rest"): - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # TODO(kbandes): remove this mock unless there's a good reason for it. - # with mock.patch.object(path_template, 'transcode') as transcode: - # Set the response as a series of pages - response = ( - catalog.ListEntryGroupsResponse( - entry_groups=[ - catalog.EntryGroup(), - catalog.EntryGroup(), - catalog.EntryGroup(), - ], - next_page_token="abc", - ), - catalog.ListEntryGroupsResponse( - entry_groups=[], - next_page_token="def", - ), - catalog.ListEntryGroupsResponse( - entry_groups=[ - catalog.EntryGroup(), - ], - next_page_token="ghi", - ), - catalog.ListEntryGroupsResponse( - entry_groups=[ - catalog.EntryGroup(), - catalog.EntryGroup(), - ], - ), - ) - # Two responses for two calls - response = response + response - - # Wrap the values into proper Response objs - response = tuple(catalog.ListEntryGroupsResponse.to_json(x) for x in response) - return_values = tuple(Response() for i in response) - for return_val, response_val in zip(return_values, response): - return_val._content = response_val.encode("UTF-8") - return_val.status_code = 200 - req.side_effect = return_values - - sample_request = {"parent": "projects/sample1/locations/sample2"} - - pager = client.list_entry_groups(request=sample_request) - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, catalog.EntryGroup) for i in results) - - pages = 
list(client.list_entry_groups(request=sample_request).pages) - for page_, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page_.raw_page.next_page_token == token - - -def test_get_entry_group_rest_use_cached_wrapped_rpc(): +def test_update_entry_group_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -14184,35 +14559,42 @@ def test_get_entry_group_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.get_entry_group in client._transport._wrapped_methods + assert ( + client._transport.update_entry_group in client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.get_entry_group] = mock_rpc + client._transport._wrapped_methods[ + client._transport.update_entry_group + ] = mock_rpc request = {} - client.get_entry_group(request) + client.update_entry_group(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.get_entry_group(request) + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.update_entry_group(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_get_entry_group_rest_required_fields( - request_type=catalog.GetEntryGroupRequest, +def test_update_entry_group_rest_required_fields( + request_type=catalog.UpdateEntryGroupRequest, ): transport_class = transports.CatalogServiceRestTransport request_init = {} - request_init["name"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -14223,21 +14605,24 @@ def test_get_entry_group_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).get_entry_group._get_unset_required_fields(jsonified_request) + ).update_entry_group._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["name"] = "name_value" - unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).get_entry_group._get_unset_required_fields(jsonified_request) + ).update_entry_group._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set( + ( + "update_mask", + "validate_only", + ) + ) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == "name_value" client = CatalogServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -14246,7 +14631,7 @@ def test_get_entry_group_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = catalog.EntryGroup() + return_value = operations_pb2.Operation(name="operations/spam") # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -14258,84 +14643,96 @@ def test_get_entry_group_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "get", + "method": "patch", "query_params": pb_request, } + transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = catalog.EntryGroup.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.get_entry_group(request) + response = client.update_entry_group(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_get_entry_group_rest_unset_required_fields(): +def test_update_entry_group_rest_unset_required_fields(): transport = transports.CatalogServiceRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = 
transport.get_entry_group._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name",))) - - -def test_get_entry_group_rest_flattened(): - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) + unset_fields = transport.update_entry_group._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "updateMask", + "validateOnly", + ) + ) + & set( + ( + "entryGroup", + "updateMask", + ) + ) + ) + + +def test_update_entry_group_rest_flattened(): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = catalog.EntryGroup() + return_value = operations_pb2.Operation(name="operations/spam") # get arguments that satisfy an http rule for this method sample_request = { - "name": "projects/sample1/locations/sample2/entryGroups/sample3" + "entry_group": { + "name": "projects/sample1/locations/sample2/entryGroups/sample3" + } } # get truthy value for each flattened field mock_args = dict( - name="name_value", + entry_group=catalog.EntryGroup(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) mock_args.update(sample_request) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - # Convert return value to protobuf type - return_value = catalog.EntryGroup.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.get_entry_group(**mock_args) + client.update_entry_group(**mock_args) # Establish that the underlying call was made with the expected 
# request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{name=projects/*/locations/*/entryGroups/*}" + "%s/v1/{entry_group.name=projects/*/locations/*/entryGroups/*}" % client.transport._host, args[1], ) -def test_get_entry_group_rest_flattened_error(transport: str = "rest"): +def test_update_entry_group_rest_flattened_error(transport: str = "rest"): client = CatalogServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -14344,13 +14741,14 @@ def test_get_entry_group_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.get_entry_group( - catalog.GetEntryGroupRequest(), - name="name_value", + client.update_entry_group( + catalog.UpdateEntryGroupRequest(), + entry_group=catalog.EntryGroup(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) -def test_create_entry_rest_use_cached_wrapped_rpc(): +def test_delete_entry_group_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -14364,34 +14762,43 @@ def test_create_entry_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.create_entry in client._transport._wrapped_methods + assert ( + client._transport.delete_entry_group in client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client._transport._wrapped_methods[client._transport.create_entry] = mock_rpc + client._transport._wrapped_methods[ + client._transport.delete_entry_group + ] = mock_rpc request = {} - client.create_entry(request) + client.delete_entry_group(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.create_entry(request) + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.delete_entry_group(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_create_entry_rest_required_fields(request_type=catalog.CreateEntryRequest): +def test_delete_entry_group_rest_required_fields( + request_type=catalog.DeleteEntryGroupRequest, +): transport_class = transports.CatalogServiceRestTransport request_init = {} - request_init["parent"] = "" - request_init["entry_id"] = "" + request_init["name"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -14399,32 +14806,26 @@ def test_create_entry_rest_required_fields(request_type=catalog.CreateEntryReque ) # verify fields with default values are dropped - assert "entryId" not in jsonified_request unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).create_entry._get_unset_required_fields(jsonified_request) + ).delete_entry_group._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - assert "entryId" in jsonified_request - assert jsonified_request["entryId"] == request_init["entry_id"] - jsonified_request["parent"] = "parent_value" - jsonified_request["entryId"] = "entry_id_value" + jsonified_request["name"] = "name_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - 
).create_entry._get_unset_required_fields(jsonified_request) + ).delete_entry_group._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("entry_id",)) + assert not set(unset_fields) - set(("etag",)) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == "parent_value" - assert "entryId" in jsonified_request - assert jsonified_request["entryId"] == "entry_id_value" + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" client = CatalogServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -14433,7 +14834,7 @@ def test_create_entry_rest_required_fields(request_type=catalog.CreateEntryReque request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = catalog.Entry() + return_value = operations_pb2.Operation(name="operations/spam") # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -14445,55 +14846,36 @@ def test_create_entry_rest_required_fields(request_type=catalog.CreateEntryReque pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "post", + "method": "delete", "query_params": pb_request, } - transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = catalog.Entry.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.create_entry(request) + response = client.delete_entry_group(request) - expected_params = [ - ( - "entryId", - "", - ), - ("$alt", "json;enum-encoding=int"), - ] + expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_create_entry_rest_unset_required_fields(): +def test_delete_entry_group_rest_unset_required_fields(): transport = transports.CatalogServiceRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.create_entry._get_unset_required_fields({}) - assert set(unset_fields) == ( - set(("entryId",)) - & set( - ( - "parent", - "entryId", - "entry", - ) - ) - ) + unset_fields = transport.delete_entry_group._get_unset_required_fields({}) + assert set(unset_fields) == (set(("etag",)) & set(("name",))) -def test_create_entry_rest_flattened(): +def test_delete_entry_group_rest_flattened(): client = CatalogServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -14502,45 +14884,41 @@ def test_create_entry_rest_flattened(): # Mock the http request call within 
the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = catalog.Entry() + return_value = operations_pb2.Operation(name="operations/spam") # get arguments that satisfy an http rule for this method sample_request = { - "parent": "projects/sample1/locations/sample2/entryGroups/sample3" + "name": "projects/sample1/locations/sample2/entryGroups/sample3" } # get truthy value for each flattened field mock_args = dict( - parent="parent_value", - entry=catalog.Entry(name="name_value"), - entry_id="entry_id_value", + name="name_value", ) mock_args.update(sample_request) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - # Convert return value to protobuf type - return_value = catalog.Entry.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.create_entry(**mock_args) + client.delete_entry_group(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{parent=projects/*/locations/*/entryGroups/*}/entries" + "%s/v1/{name=projects/*/locations/*/entryGroups/*}" % client.transport._host, args[1], ) -def test_create_entry_rest_flattened_error(transport: str = "rest"): +def test_delete_entry_group_rest_flattened_error(transport: str = "rest"): client = CatalogServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -14549,15 +14927,13 @@ def test_create_entry_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.create_entry( - catalog.CreateEntryRequest(), - parent="parent_value", - entry=catalog.Entry(name="name_value"), - entry_id="entry_id_value", + client.delete_entry_group( + catalog.DeleteEntryGroupRequest(), + name="name_value", ) -def test_update_entry_rest_use_cached_wrapped_rpc(): +def test_list_entry_groups_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -14571,32 +14947,37 @@ def test_update_entry_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.update_entry in client._transport._wrapped_methods + assert client._transport.list_entry_groups in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.update_entry] = mock_rpc + client._transport._wrapped_methods[ + client._transport.list_entry_groups + ] = mock_rpc request = {} - client.update_entry(request) + client.list_entry_groups(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.update_entry(request) + client.list_entry_groups(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_update_entry_rest_required_fields(request_type=catalog.UpdateEntryRequest): +def test_list_entry_groups_rest_required_fields( + request_type=catalog.ListEntryGroupsRequest, +): transport_class = transports.CatalogServiceRestTransport request_init = {} + request_init["parent"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -14607,26 +14988,30 @@ def test_update_entry_rest_required_fields(request_type=catalog.UpdateEntryReque unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).update_entry._get_unset_required_fields(jsonified_request) + ).list_entry_groups._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present + jsonified_request["parent"] = "parent_value" + unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).update_entry._get_unset_required_fields(jsonified_request) + ).list_entry_groups._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. 
assert not set(unset_fields) - set( ( - "allow_missing", - "aspect_keys", - "delete_missing_aspects", - "update_mask", + "filter", + "order_by", + "page_size", + "page_token", ) ) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" client = CatalogServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -14635,7 +15020,7 @@ def test_update_entry_rest_required_fields(request_type=catalog.UpdateEntryReque request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = catalog.Entry() + return_value = catalog.ListEntryGroupsResponse() # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -14647,50 +15032,49 @@ def test_update_entry_rest_required_fields(request_type=catalog.UpdateEntryReque pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "patch", + "method": "get", "query_params": pb_request, } - transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = catalog.Entry.pb(return_value) + return_value = catalog.ListEntryGroupsResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.update_entry(request) + response = client.list_entry_groups(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def 
test_update_entry_rest_unset_required_fields(): +def test_list_entry_groups_rest_unset_required_fields(): transport = transports.CatalogServiceRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.update_entry._get_unset_required_fields({}) + unset_fields = transport.list_entry_groups._get_unset_required_fields({}) assert set(unset_fields) == ( set( ( - "allowMissing", - "aspectKeys", - "deleteMissingAspects", - "updateMask", + "filter", + "orderBy", + "pageSize", + "pageToken", ) ) - & set(("entry",)) + & set(("parent",)) ) -def test_update_entry_rest_flattened(): +def test_list_entry_groups_rest_flattened(): client = CatalogServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -14699,19 +15083,14 @@ def test_update_entry_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = catalog.Entry() + return_value = catalog.ListEntryGroupsResponse() # get arguments that satisfy an http rule for this method - sample_request = { - "entry": { - "name": "projects/sample1/locations/sample2/entryGroups/sample3/entries/sample4" - } - } + sample_request = {"parent": "projects/sample1/locations/sample2"} # get truthy value for each flattened field mock_args = dict( - entry=catalog.Entry(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + parent="parent_value", ) mock_args.update(sample_request) @@ -14719,26 +15098,26 @@ def test_update_entry_rest_flattened(): response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = catalog.Entry.pb(return_value) + return_value = catalog.ListEntryGroupsResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.update_entry(**mock_args) + client.list_entry_groups(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{entry.name=projects/*/locations/*/entryGroups/*/entries/**}" + "%s/v1/{parent=projects/*/locations/*}/entryGroups" % client.transport._host, args[1], ) -def test_update_entry_rest_flattened_error(transport: str = "rest"): +def test_list_entry_groups_rest_flattened_error(transport: str = "rest"): client = CatalogServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -14747,50 +15126,112 @@ def test_update_entry_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.update_entry( - catalog.UpdateEntryRequest(), - entry=catalog.Entry(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), - ) - - -def test_delete_entry_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", + client.list_entry_groups( + catalog.ListEntryGroupsRequest(), + parent="parent_value", ) - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - # Ensure method has been cached - assert client._transport.delete_entry in client._transport._wrapped_methods +def test_list_entry_groups_rest_pager(transport: str = "rest"): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + catalog.ListEntryGroupsResponse( + entry_groups=[ + catalog.EntryGroup(), + catalog.EntryGroup(), + catalog.EntryGroup(), + ], + next_page_token="abc", + ), + catalog.ListEntryGroupsResponse( + entry_groups=[], + next_page_token="def", + ), + catalog.ListEntryGroupsResponse( + entry_groups=[ + catalog.EntryGroup(), + ], + next_page_token="ghi", + ), + catalog.ListEntryGroupsResponse( + entry_groups=[ + catalog.EntryGroup(), + catalog.EntryGroup(), + ], + ), ) - client._transport._wrapped_methods[client._transport.delete_entry] = mock_rpc + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(catalog.ListEntryGroupsResponse.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {"parent": "projects/sample1/locations/sample2"} + + pager = client.list_entry_groups(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, catalog.EntryGroup) for i in results) + + pages = list(client.list_entry_groups(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +def test_get_entry_group_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + 
wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_entry_group in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.get_entry_group] = mock_rpc request = {} - client.delete_entry(request) + client.get_entry_group(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.delete_entry(request) + client.get_entry_group(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_delete_entry_rest_required_fields(request_type=catalog.DeleteEntryRequest): +def test_get_entry_group_rest_required_fields( + request_type=catalog.GetEntryGroupRequest, +): transport_class = transports.CatalogServiceRestTransport request_init = {} @@ -14805,7 +15246,7 @@ def test_delete_entry_rest_required_fields(request_type=catalog.DeleteEntryReque unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).delete_entry._get_unset_required_fields(jsonified_request) + ).get_entry_group._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present @@ -14814,7 +15255,7 @@ def test_delete_entry_rest_required_fields(request_type=catalog.DeleteEntryReque unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).delete_entry._get_unset_required_fields(jsonified_request) + ).get_entry_group._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone @@ -14828,7 +15269,7 @@ def test_delete_entry_rest_required_fields(request_type=catalog.DeleteEntryReque request = 
request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = catalog.Entry() + return_value = catalog.EntryGroup() # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -14840,7 +15281,7 @@ def test_delete_entry_rest_required_fields(request_type=catalog.DeleteEntryReque pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "delete", + "method": "get", "query_params": pb_request, } transcode.return_value = transcode_result @@ -14849,30 +15290,30 @@ def test_delete_entry_rest_required_fields(request_type=catalog.DeleteEntryReque response_value.status_code = 200 # Convert return value to protobuf type - return_value = catalog.Entry.pb(return_value) + return_value = catalog.EntryGroup.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.delete_entry(request) + response = client.get_entry_group(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_delete_entry_rest_unset_required_fields(): +def test_get_entry_group_rest_unset_required_fields(): transport = transports.CatalogServiceRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.delete_entry._get_unset_required_fields({}) + unset_fields = transport.get_entry_group._get_unset_required_fields({}) assert set(unset_fields) == (set(()) & set(("name",))) -def test_delete_entry_rest_flattened(): +def test_get_entry_group_rest_flattened(): client = CatalogServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -14881,11 
+15322,11 @@ def test_delete_entry_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = catalog.Entry() + return_value = catalog.EntryGroup() # get arguments that satisfy an http rule for this method sample_request = { - "name": "projects/sample1/locations/sample2/entryGroups/sample3/entries/sample4" + "name": "projects/sample1/locations/sample2/entryGroups/sample3" } # get truthy value for each flattened field @@ -14898,26 +15339,26 @@ def test_delete_entry_rest_flattened(): response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = catalog.Entry.pb(return_value) + return_value = catalog.EntryGroup.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.delete_entry(**mock_args) + client.get_entry_group(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{name=projects/*/locations/*/entryGroups/*/entries/**}" + "%s/v1/{name=projects/*/locations/*/entryGroups/*}" % client.transport._host, args[1], ) -def test_delete_entry_rest_flattened_error(transport: str = "rest"): +def test_get_entry_group_rest_flattened_error(transport: str = "rest"): client = CatalogServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -14926,13 +15367,13 @@ def test_delete_entry_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.delete_entry( - catalog.DeleteEntryRequest(), + client.get_entry_group( + catalog.GetEntryGroupRequest(), name="name_value", ) -def test_list_entries_rest_use_cached_wrapped_rpc(): +def test_create_entry_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -14946,33 +15387,34 @@ def test_list_entries_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.list_entries in client._transport._wrapped_methods + assert client._transport.create_entry in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.list_entries] = mock_rpc + client._transport._wrapped_methods[client._transport.create_entry] = mock_rpc request = {} - client.list_entries(request) + client.create_entry(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.list_entries(request) + client.create_entry(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_list_entries_rest_required_fields(request_type=catalog.ListEntriesRequest): +def test_create_entry_rest_required_fields(request_type=catalog.CreateEntryRequest): transport_class = transports.CatalogServiceRestTransport request_init = {} request_init["parent"] = "" + request_init["entry_id"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -14980,32 +15422,32 @@ def test_list_entries_rest_required_fields(request_type=catalog.ListEntriesReque ) # verify fields with default values are dropped + assert "entryId" not in jsonified_request unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).list_entries._get_unset_required_fields(jsonified_request) + ).create_entry._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present + assert "entryId" in jsonified_request + assert jsonified_request["entryId"] == request_init["entry_id"] jsonified_request["parent"] = "parent_value" + jsonified_request["entryId"] = "entry_id_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).list_entries._get_unset_required_fields(jsonified_request) + ).create_entry._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. 
- assert not set(unset_fields) - set( - ( - "filter", - "page_size", - "page_token", - ) - ) + assert not set(unset_fields) - set(("entry_id",)) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone assert "parent" in jsonified_request assert jsonified_request["parent"] == "parent_value" + assert "entryId" in jsonified_request + assert jsonified_request["entryId"] == "entry_id_value" client = CatalogServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -15014,7 +15456,7 @@ def test_list_entries_rest_required_fields(request_type=catalog.ListEntriesReque request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = catalog.ListEntriesResponse() + return_value = catalog.Entry() # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -15026,48 +15468,55 @@ def test_list_entries_rest_required_fields(request_type=catalog.ListEntriesReque pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "get", + "method": "post", "query_params": pb_request, } + transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = catalog.ListEntriesResponse.pb(return_value) + return_value = catalog.Entry.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.list_entries(request) + response = client.create_entry(request) - expected_params = [("$alt", "json;enum-encoding=int")] + expected_params = [ + ( + "entryId", + "", + ), + ("$alt", "json;enum-encoding=int"), + ] 
actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_list_entries_rest_unset_required_fields(): +def test_create_entry_rest_unset_required_fields(): transport = transports.CatalogServiceRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.list_entries._get_unset_required_fields({}) + unset_fields = transport.create_entry._get_unset_required_fields({}) assert set(unset_fields) == ( - set( + set(("entryId",)) + & set( ( - "filter", - "pageSize", - "pageToken", + "parent", + "entryId", + "entry", ) ) - & set(("parent",)) ) -def test_list_entries_rest_flattened(): +def test_create_entry_rest_flattened(): client = CatalogServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -15076,7 +15525,7 @@ def test_list_entries_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = catalog.ListEntriesResponse() + return_value = catalog.Entry() # get arguments that satisfy an http rule for this method sample_request = { @@ -15086,6 +15535,8 @@ def test_list_entries_rest_flattened(): # get truthy value for each flattened field mock_args = dict( parent="parent_value", + entry=catalog.Entry(name="name_value"), + entry_id="entry_id_value", ) mock_args.update(sample_request) @@ -15093,13 +15544,13 @@ def test_list_entries_rest_flattened(): response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = catalog.ListEntriesResponse.pb(return_value) + return_value = catalog.Entry.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.list_entries(**mock_args) + client.create_entry(**mock_args) # Establish that the underlying call was made with the expected # request object values. @@ -15112,7 +15563,7 @@ def test_list_entries_rest_flattened(): ) -def test_list_entries_rest_flattened_error(transport: str = "rest"): +def test_create_entry_rest_flattened_error(transport: str = "rest"): client = CatalogServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -15121,116 +15572,54 @@ def test_list_entries_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.list_entries( - catalog.ListEntriesRequest(), + client.create_entry( + catalog.CreateEntryRequest(), parent="parent_value", + entry=catalog.Entry(name="name_value"), + entry_id="entry_id_value", ) -def test_list_entries_rest_pager(transport: str = "rest"): - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # TODO(kbandes): remove this mock unless there's a good reason for it. - # with mock.patch.object(path_template, 'transcode') as transcode: - # Set the response as a series of pages - response = ( - catalog.ListEntriesResponse( - entries=[ - catalog.Entry(), - catalog.Entry(), - catalog.Entry(), - ], - next_page_token="abc", - ), - catalog.ListEntriesResponse( - entries=[], - next_page_token="def", - ), - catalog.ListEntriesResponse( - entries=[ - catalog.Entry(), - ], - next_page_token="ghi", - ), - catalog.ListEntriesResponse( - entries=[ - catalog.Entry(), - catalog.Entry(), - ], - ), +def test_update_entry_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", ) - # Two responses for two calls - response = response + response - # Wrap the values into proper Response objs - response = tuple(catalog.ListEntriesResponse.to_json(x) for x in response) - return_values = tuple(Response() for i in response) - for return_val, response_val in zip(return_values, response): - return_val._content = response_val.encode("UTF-8") - return_val.status_code = 200 - req.side_effect = return_values - - sample_request = { - "parent": 
"projects/sample1/locations/sample2/entryGroups/sample3" - } - - pager = client.list_entries(request=sample_request) - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, catalog.Entry) for i in results) - - pages = list(client.list_entries(request=sample_request).pages) - for page_, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page_.raw_page.next_page_token == token - - -def test_get_entry_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.get_entry in client._transport._wrapped_methods + assert client._transport.update_entry in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.get_entry] = mock_rpc + client._transport._wrapped_methods[client._transport.update_entry] = mock_rpc request = {} - client.get_entry(request) + client.update_entry(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.get_entry(request) + client.update_entry(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_get_entry_rest_required_fields(request_type=catalog.GetEntryRequest): +def test_update_entry_rest_required_fields(request_type=catalog.UpdateEntryRequest): transport_class = transports.CatalogServiceRestTransport request_init = {} - request_init["name"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -15241,29 +15630,26 @@ def test_get_entry_rest_required_fields(request_type=catalog.GetEntryRequest): unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).get_entry._get_unset_required_fields(jsonified_request) + ).update_entry._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["name"] = "name_value" - unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).get_entry._get_unset_required_fields(jsonified_request) + ).update_entry._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. 
assert not set(unset_fields) - set( ( - "aspect_types", - "paths", - "view", + "allow_missing", + "aspect_keys", + "delete_missing_aspects", + "update_mask", ) ) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == "name_value" client = CatalogServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -15284,9 +15670,10 @@ def test_get_entry_rest_required_fields(request_type=catalog.GetEntryRequest): pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "get", + "method": "patch", "query_params": pb_request, } + transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() @@ -15300,32 +15687,33 @@ def test_get_entry_rest_required_fields(request_type=catalog.GetEntryRequest): req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.get_entry(request) + response = client.update_entry(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_get_entry_rest_unset_required_fields(): +def test_update_entry_rest_unset_required_fields(): transport = transports.CatalogServiceRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.get_entry._get_unset_required_fields({}) + unset_fields = transport.update_entry._get_unset_required_fields({}) assert set(unset_fields) == ( set( ( - "aspectTypes", - "paths", - "view", + "allowMissing", + "aspectKeys", + "deleteMissingAspects", + "updateMask", ) ) - & set(("name",)) + & set(("entry",)) ) -def test_get_entry_rest_flattened(): +def test_update_entry_rest_flattened(): client = CatalogServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -15338,12 +15726,15 @@ def 
test_get_entry_rest_flattened(): # get arguments that satisfy an http rule for this method sample_request = { - "name": "projects/sample1/locations/sample2/entryGroups/sample3/entries/sample4" + "entry": { + "name": "projects/sample1/locations/sample2/entryGroups/sample3/entries/sample4" + } } # get truthy value for each flattened field mock_args = dict( - name="name_value", + entry=catalog.Entry(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) mock_args.update(sample_request) @@ -15357,20 +15748,20 @@ def test_get_entry_rest_flattened(): req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.get_entry(**mock_args) + client.update_entry(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{name=projects/*/locations/*/entryGroups/*/entries/**}" + "%s/v1/{entry.name=projects/*/locations/*/entryGroups/*/entries/**}" % client.transport._host, args[1], ) -def test_get_entry_rest_flattened_error(transport: str = "rest"): +def test_update_entry_rest_flattened_error(transport: str = "rest"): client = CatalogServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -15379,13 +15770,14 @@ def test_get_entry_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.get_entry( - catalog.GetEntryRequest(), - name="name_value", + client.update_entry( + catalog.UpdateEntryRequest(), + entry=catalog.Entry(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) -def test_lookup_entry_rest_use_cached_wrapped_rpc(): +def test_delete_entry_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -15399,34 +15791,33 @@ def test_lookup_entry_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.lookup_entry in client._transport._wrapped_methods + assert client._transport.delete_entry in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.lookup_entry] = mock_rpc + client._transport._wrapped_methods[client._transport.delete_entry] = mock_rpc request = {} - client.lookup_entry(request) + client.delete_entry(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.lookup_entry(request) + client.delete_entry(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_lookup_entry_rest_required_fields(request_type=catalog.LookupEntryRequest): +def test_delete_entry_rest_required_fields(request_type=catalog.DeleteEntryRequest): transport_class = transports.CatalogServiceRestTransport request_init = {} request_init["name"] = "" - request_init["entry"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -15434,39 +15825,24 @@ def test_lookup_entry_rest_required_fields(request_type=catalog.LookupEntryReque ) # verify fields with default values are dropped - assert "entry" not in jsonified_request unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).lookup_entry._get_unset_required_fields(jsonified_request) + ).delete_entry._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - assert "entry" in jsonified_request - assert jsonified_request["entry"] == request_init["entry"] jsonified_request["name"] = "name_value" - jsonified_request["entry"] = "entry_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).lookup_entry._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. 
- assert not set(unset_fields) - set( - ( - "aspect_types", - "entry", - "paths", - "view", - ) - ) + ).delete_entry._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone assert "name" in jsonified_request assert jsonified_request["name"] == "name_value" - assert "entry" in jsonified_request - assert jsonified_request["entry"] == "entry_value" client = CatalogServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -15487,7 +15863,7 @@ def test_lookup_entry_rest_required_fields(request_type=catalog.LookupEntryReque pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "get", + "method": "delete", "query_params": pb_request, } transcode.return_value = transcode_result @@ -15503,44 +15879,83 @@ def test_lookup_entry_rest_required_fields(request_type=catalog.LookupEntryReque req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.lookup_entry(request) + response = client.delete_entry(request) - expected_params = [ - ( - "entry", - "", - ), - ("$alt", "json;enum-encoding=int"), - ] + expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_lookup_entry_rest_unset_required_fields(): +def test_delete_entry_rest_unset_required_fields(): transport = transports.CatalogServiceRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.lookup_entry._get_unset_required_fields({}) - assert set(unset_fields) == ( - set( - ( - "aspectTypes", - "entry", - "paths", - "view", - ) + unset_fields = transport.delete_entry._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +def test_delete_entry_rest_flattened(): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + 
transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = catalog.Entry() + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/entryGroups/sample3/entries/sample4" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", ) - & set( - ( - "name", - "entry", - ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = catalog.Entry.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.delete_entry(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/entryGroups/*/entries/**}" + % client.transport._host, + args[1], ) + + +def test_delete_entry_rest_flattened_error(transport: str = "rest"): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.delete_entry( + catalog.DeleteEntryRequest(), + name="name_value", + ) + -def test_search_entries_rest_use_cached_wrapped_rpc(): +def test_list_entries_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -15554,34 +15969,33 @@ def test_search_entries_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.search_entries in client._transport._wrapped_methods + assert client._transport.list_entries in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.search_entries] = mock_rpc + client._transport._wrapped_methods[client._transport.list_entries] = mock_rpc request = {} - client.search_entries(request) + client.list_entries(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.search_entries(request) + client.list_entries(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_search_entries_rest_required_fields(request_type=catalog.SearchEntriesRequest): +def test_list_entries_rest_required_fields(request_type=catalog.ListEntriesRequest): transport_class = transports.CatalogServiceRestTransport request_init = {} - request_init["name"] = "" - request_init["query"] = "" + request_init["parent"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -15589,41 +16003,32 @@ def test_search_entries_rest_required_fields(request_type=catalog.SearchEntriesR ) # verify fields with default values are dropped - assert "query" not in jsonified_request unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).search_entries._get_unset_required_fields(jsonified_request) + ).list_entries._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - assert "query" in jsonified_request - assert jsonified_request["query"] == request_init["query"] - jsonified_request["name"] = "name_value" - jsonified_request["query"] = "query_value" + jsonified_request["parent"] = "parent_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).search_entries._get_unset_required_fields(jsonified_request) + ).list_entries._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. 
assert not set(unset_fields) - set( ( - "order_by", + "filter", "page_size", "page_token", - "query", - "scope", - "semantic_search", ) ) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == "name_value" - assert "query" in jsonified_request - assert jsonified_request["query"] == "query_value" + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" client = CatalogServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -15632,7 +16037,7 @@ def test_search_entries_rest_required_fields(request_type=catalog.SearchEntriesR request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = catalog.SearchEntriesResponse() + return_value = catalog.ListEntriesResponse() # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -15644,7 +16049,7 @@ def test_search_entries_rest_required_fields(request_type=catalog.SearchEntriesR pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "post", + "method": "get", "query_params": pb_request, } transcode.return_value = transcode_result @@ -15653,53 +16058,39 @@ def test_search_entries_rest_required_fields(request_type=catalog.SearchEntriesR response_value.status_code = 200 # Convert return value to protobuf type - return_value = catalog.SearchEntriesResponse.pb(return_value) + return_value = catalog.ListEntriesResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.search_entries(request) + response = client.list_entries(request) 
- expected_params = [ - ( - "query", - "", - ), - ("$alt", "json;enum-encoding=int"), - ] + expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_search_entries_rest_unset_required_fields(): +def test_list_entries_rest_unset_required_fields(): transport = transports.CatalogServiceRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.search_entries._get_unset_required_fields({}) + unset_fields = transport.list_entries._get_unset_required_fields({}) assert set(unset_fields) == ( set( ( - "orderBy", + "filter", "pageSize", "pageToken", - "query", - "scope", - "semanticSearch", - ) - ) - & set( - ( - "name", - "query", ) ) + & set(("parent",)) ) -def test_search_entries_rest_flattened(): +def test_list_entries_rest_flattened(): client = CatalogServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -15708,15 +16099,16 @@ def test_search_entries_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = catalog.SearchEntriesResponse() + return_value = catalog.ListEntriesResponse() # get arguments that satisfy an http rule for this method - sample_request = {"name": "projects/sample1/locations/sample2"} + sample_request = { + "parent": "projects/sample1/locations/sample2/entryGroups/sample3" + } # get truthy value for each flattened field mock_args = dict( - name="name_value", - query="query_value", + parent="parent_value", ) mock_args.update(sample_request) @@ -15724,26 +16116,26 @@ def test_search_entries_rest_flattened(): response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = catalog.SearchEntriesResponse.pb(return_value) + return_value = catalog.ListEntriesResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.search_entries(**mock_args) + client.list_entries(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{name=projects/*/locations/*}:searchEntries" + "%s/v1/{parent=projects/*/locations/*/entryGroups/*}/entries" % client.transport._host, args[1], ) -def test_search_entries_rest_flattened_error(transport: str = "rest"): +def test_list_entries_rest_flattened_error(transport: str = "rest"): client = CatalogServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -15752,14 +16144,13 @@ def test_search_entries_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.search_entries( - catalog.SearchEntriesRequest(), - name="name_value", - query="query_value", + client.list_entries( + catalog.ListEntriesRequest(), + parent="parent_value", ) -def test_search_entries_rest_pager(transport: str = "rest"): +def test_list_entries_rest_pager(transport: str = "rest"): client = CatalogServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -15771,28 +16162,28 @@ def test_search_entries_rest_pager(transport: str = "rest"): # with mock.patch.object(path_template, 'transcode') as transcode: # Set the response as a series of pages response = ( - catalog.SearchEntriesResponse( - results=[ - catalog.SearchEntriesResult(), - catalog.SearchEntriesResult(), - catalog.SearchEntriesResult(), + catalog.ListEntriesResponse( + entries=[ + catalog.Entry(), + catalog.Entry(), + catalog.Entry(), ], next_page_token="abc", ), - catalog.SearchEntriesResponse( - results=[], + catalog.ListEntriesResponse( + entries=[], next_page_token="def", ), - catalog.SearchEntriesResponse( - results=[ - catalog.SearchEntriesResult(), + catalog.ListEntriesResponse( + entries=[ + catalog.Entry(), ], next_page_token="ghi", ), - catalog.SearchEntriesResponse( - results=[ - catalog.SearchEntriesResult(), - catalog.SearchEntriesResult(), + catalog.ListEntriesResponse( + entries=[ + catalog.Entry(), + catalog.Entry(), ], ), ) @@ -15800,27 +16191,29 @@ def test_search_entries_rest_pager(transport: str = "rest"): response = response + response # Wrap the values into proper Response objs - response = tuple(catalog.SearchEntriesResponse.to_json(x) for x in response) + response = tuple(catalog.ListEntriesResponse.to_json(x) for x in response) return_values = tuple(Response() for i in response) for return_val, response_val in zip(return_values, response): return_val._content = response_val.encode("UTF-8") return_val.status_code = 200 req.side_effect = return_values - sample_request = {"name": 
"projects/sample1/locations/sample2"} + sample_request = { + "parent": "projects/sample1/locations/sample2/entryGroups/sample3" + } - pager = client.search_entries(request=sample_request) + pager = client.list_entries(request=sample_request) results = list(pager) assert len(results) == 6 - assert all(isinstance(i, catalog.SearchEntriesResult) for i in results) + assert all(isinstance(i, catalog.Entry) for i in results) - pages = list(client.search_entries(request=sample_request).pages) + pages = list(client.list_entries(request=sample_request).pages) for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token -def test_create_metadata_job_rest_use_cached_wrapped_rpc(): +def test_get_entry_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -15834,43 +16227,33 @@ def test_create_metadata_job_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert ( - client._transport.create_metadata_job in client._transport._wrapped_methods - ) + assert client._transport.get_entry in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.create_metadata_job - ] = mock_rpc + client._transport._wrapped_methods[client._transport.get_entry] = mock_rpc request = {} - client.create_metadata_job(request) + client.get_entry(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.create_metadata_job(request) + client.get_entry(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_create_metadata_job_rest_required_fields( - request_type=catalog.CreateMetadataJobRequest, -): +def test_get_entry_rest_required_fields(request_type=catalog.GetEntryRequest): transport_class = transports.CatalogServiceRestTransport request_init = {} - request_init["parent"] = "" + request_init["name"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -15881,28 +16264,29 @@ def test_create_metadata_job_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).create_metadata_job._get_unset_required_fields(jsonified_request) + ).get_entry._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["parent"] = "parent_value" + jsonified_request["name"] = "name_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).create_metadata_job._get_unset_required_fields(jsonified_request) + ).get_entry._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. 
assert not set(unset_fields) - set( ( - "metadata_job_id", - "validate_only", + "aspect_types", + "paths", + "view", ) ) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == "parent_value" + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" client = CatalogServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -15911,7 +16295,7 @@ def test_create_metadata_job_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") + return_value = catalog.Entry() # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -15923,50 +16307,48 @@ def test_create_metadata_job_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "post", + "method": "get", "query_params": pb_request, } - transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = catalog.Entry.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.create_metadata_job(request) + response = client.get_entry(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_create_metadata_job_rest_unset_required_fields(): +def test_get_entry_rest_unset_required_fields(): transport = 
transports.CatalogServiceRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.create_metadata_job._get_unset_required_fields({}) + unset_fields = transport.get_entry._get_unset_required_fields({}) assert set(unset_fields) == ( set( ( - "metadataJobId", - "validateOnly", - ) - ) - & set( - ( - "parent", - "metadataJob", + "aspectTypes", + "paths", + "view", ) ) + & set(("name",)) ) -def test_create_metadata_job_rest_flattened(): +def test_get_entry_rest_flattened(): client = CatalogServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -15975,41 +16357,43 @@ def test_create_metadata_job_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") + return_value = catalog.Entry() # get arguments that satisfy an http rule for this method - sample_request = {"parent": "projects/sample1/locations/sample2"} + sample_request = { + "name": "projects/sample1/locations/sample2/entryGroups/sample3/entries/sample4" + } # get truthy value for each flattened field mock_args = dict( - parent="parent_value", - metadata_job=catalog.MetadataJob(name="name_value"), - metadata_job_id="metadata_job_id_value", + name="name_value", ) mock_args.update(sample_request) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 + # Convert return value to protobuf type + return_value = catalog.Entry.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.create_metadata_job(**mock_args) + client.get_entry(**mock_args) # Establish that the underlying call was 
made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{parent=projects/*/locations/*}/metadataJobs" + "%s/v1/{name=projects/*/locations/*/entryGroups/*/entries/**}" % client.transport._host, args[1], ) -def test_create_metadata_job_rest_flattened_error(transport: str = "rest"): +def test_get_entry_rest_flattened_error(transport: str = "rest"): client = CatalogServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -16018,15 +16402,13 @@ def test_create_metadata_job_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.create_metadata_job( - catalog.CreateMetadataJobRequest(), - parent="parent_value", - metadata_job=catalog.MetadataJob(name="name_value"), - metadata_job_id="metadata_job_id_value", + client.get_entry( + catalog.GetEntryRequest(), + name="name_value", ) -def test_get_metadata_job_rest_use_cached_wrapped_rpc(): +def test_lookup_entry_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -16040,37 +16422,34 @@ def test_get_metadata_job_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.get_metadata_job in client._transport._wrapped_methods + assert client._transport.lookup_entry in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client._transport._wrapped_methods[ - client._transport.get_metadata_job - ] = mock_rpc + client._transport._wrapped_methods[client._transport.lookup_entry] = mock_rpc request = {} - client.get_metadata_job(request) + client.lookup_entry(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.get_metadata_job(request) + client.lookup_entry(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_get_metadata_job_rest_required_fields( - request_type=catalog.GetMetadataJobRequest, -): +def test_lookup_entry_rest_required_fields(request_type=catalog.LookupEntryRequest): transport_class = transports.CatalogServiceRestTransport request_init = {} request_init["name"] = "" + request_init["entry"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -16078,24 +16457,39 @@ def test_get_metadata_job_rest_required_fields( ) # verify fields with default values are dropped + assert "entry" not in jsonified_request unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).get_metadata_job._get_unset_required_fields(jsonified_request) + ).lookup_entry._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present + assert "entry" in jsonified_request + assert jsonified_request["entry"] == request_init["entry"] jsonified_request["name"] = "name_value" + jsonified_request["entry"] = "entry_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).get_metadata_job._get_unset_required_fields(jsonified_request) + ).lookup_entry._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set( + ( + "aspect_types", + "entry", + "paths", + "view", + ) + ) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone assert "name" in jsonified_request assert jsonified_request["name"] == "name_value" + assert "entry" in jsonified_request + assert jsonified_request["entry"] == "entry_value" client = CatalogServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -16104,7 +16498,7 @@ def test_get_metadata_job_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = catalog.MetadataJob() + return_value = catalog.Entry() # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -16125,90 +16519,51 @@ def test_get_metadata_job_rest_required_fields( response_value.status_code = 200 # Convert return value to protobuf type - return_value = catalog.MetadataJob.pb(return_value) + return_value = catalog.Entry.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.get_metadata_job(request) + response = client.lookup_entry(request) - expected_params = [("$alt", "json;enum-encoding=int")] + expected_params = [ + ( + "entry", + "", + ), + ("$alt", "json;enum-encoding=int"), + ] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_get_metadata_job_rest_unset_required_fields(): +def test_lookup_entry_rest_unset_required_fields(): transport = transports.CatalogServiceRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.get_metadata_job._get_unset_required_fields({}) - assert set(unset_fields) 
== (set(()) & set(("name",))) + unset_fields = transport.lookup_entry._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "aspectTypes", + "entry", + "paths", + "view", + ) + ) + & set( + ( + "name", + "entry", + ) + ) + ) -def test_get_metadata_job_rest_flattened(): - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = catalog.MetadataJob() - - # get arguments that satisfy an http rule for this method - sample_request = { - "name": "projects/sample1/locations/sample2/metadataJobs/sample3" - } - - # get truthy value for each flattened field - mock_args = dict( - name="name_value", - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = catalog.MetadataJob.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.get_metadata_job(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/v1/{name=projects/*/locations/*/metadataJobs/*}" - % client.transport._host, - args[1], - ) - - -def test_get_metadata_job_rest_flattened_error(transport: str = "rest"): - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.get_metadata_job( - catalog.GetMetadataJobRequest(), - name="name_value", - ) - - -def test_list_metadata_jobs_rest_use_cached_wrapped_rpc(): +def test_search_entries_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -16222,39 +16577,34 @@ def test_list_metadata_jobs_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert ( - client._transport.list_metadata_jobs in client._transport._wrapped_methods - ) + assert client._transport.search_entries in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.list_metadata_jobs - ] = mock_rpc + client._transport._wrapped_methods[client._transport.search_entries] = mock_rpc request = {} - client.list_metadata_jobs(request) + client.search_entries(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.list_metadata_jobs(request) + client.search_entries(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_list_metadata_jobs_rest_required_fields( - request_type=catalog.ListMetadataJobsRequest, -): +def test_search_entries_rest_required_fields(request_type=catalog.SearchEntriesRequest): transport_class = transports.CatalogServiceRestTransport request_init = {} - request_init["parent"] = "" + request_init["name"] = "" + request_init["query"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -16262,33 +16612,41 @@ def test_list_metadata_jobs_rest_required_fields( ) # verify fields with default values are dropped + assert "query" not in jsonified_request unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).list_metadata_jobs._get_unset_required_fields(jsonified_request) + ).search_entries._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present + assert "query" in jsonified_request + assert jsonified_request["query"] == request_init["query"] - jsonified_request["parent"] = "parent_value" + jsonified_request["name"] = "name_value" + jsonified_request["query"] = "query_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).list_metadata_jobs._get_unset_required_fields(jsonified_request) + ).search_entries._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. 
assert not set(unset_fields) - set( ( - "filter", "order_by", "page_size", "page_token", + "query", + "scope", + "semantic_search", ) ) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == "parent_value" + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + assert "query" in jsonified_request + assert jsonified_request["query"] == "query_value" client = CatalogServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -16297,7 +16655,7 @@ def test_list_metadata_jobs_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = catalog.ListMetadataJobsResponse() + return_value = catalog.SearchEntriesResponse() # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -16309,7 +16667,7 @@ def test_list_metadata_jobs_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "get", + "method": "post", "query_params": pb_request, } transcode.return_value = transcode_result @@ -16318,40 +16676,53 @@ def test_list_metadata_jobs_rest_required_fields( response_value.status_code = 200 # Convert return value to protobuf type - return_value = catalog.ListMetadataJobsResponse.pb(return_value) + return_value = catalog.SearchEntriesResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.list_metadata_jobs(request) + response = client.search_entries(request) - expected_params = [("$alt", "json;enum-encoding=int")] + expected_params = [ 
+ ( + "query", + "", + ), + ("$alt", "json;enum-encoding=int"), + ] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_list_metadata_jobs_rest_unset_required_fields(): +def test_search_entries_rest_unset_required_fields(): transport = transports.CatalogServiceRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.list_metadata_jobs._get_unset_required_fields({}) + unset_fields = transport.search_entries._get_unset_required_fields({}) assert set(unset_fields) == ( set( ( - "filter", "orderBy", "pageSize", "pageToken", + "query", + "scope", + "semanticSearch", + ) + ) + & set( + ( + "name", + "query", ) ) - & set(("parent",)) ) -def test_list_metadata_jobs_rest_flattened(): +def test_search_entries_rest_flattened(): client = CatalogServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -16360,14 +16731,15 @@ def test_list_metadata_jobs_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = catalog.ListMetadataJobsResponse() + return_value = catalog.SearchEntriesResponse() # get arguments that satisfy an http rule for this method - sample_request = {"parent": "projects/sample1/locations/sample2"} + sample_request = {"name": "projects/sample1/locations/sample2"} # get truthy value for each flattened field mock_args = dict( - parent="parent_value", + name="name_value", + query="query_value", ) mock_args.update(sample_request) @@ -16375,26 +16747,26 @@ def test_list_metadata_jobs_rest_flattened(): response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = catalog.ListMetadataJobsResponse.pb(return_value) + return_value = catalog.SearchEntriesResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.list_metadata_jobs(**mock_args) + client.search_entries(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{parent=projects/*/locations/*}/metadataJobs" + "%s/v1/{name=projects/*/locations/*}:searchEntries" % client.transport._host, args[1], ) -def test_list_metadata_jobs_rest_flattened_error(transport: str = "rest"): +def test_search_entries_rest_flattened_error(transport: str = "rest"): client = CatalogServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -16403,13 +16775,14 @@ def test_list_metadata_jobs_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.list_metadata_jobs( - catalog.ListMetadataJobsRequest(), - parent="parent_value", + client.search_entries( + catalog.SearchEntriesRequest(), + name="name_value", + query="query_value", ) -def test_list_metadata_jobs_rest_pager(transport: str = "rest"): +def test_search_entries_rest_pager(transport: str = "rest"): client = CatalogServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -16421,28 +16794,28 @@ def test_list_metadata_jobs_rest_pager(transport: str = "rest"): # with mock.patch.object(path_template, 'transcode') as transcode: # Set the response as a series of pages response = ( - catalog.ListMetadataJobsResponse( - metadata_jobs=[ - catalog.MetadataJob(), - catalog.MetadataJob(), - catalog.MetadataJob(), + catalog.SearchEntriesResponse( + results=[ + catalog.SearchEntriesResult(), + catalog.SearchEntriesResult(), + catalog.SearchEntriesResult(), ], next_page_token="abc", ), - catalog.ListMetadataJobsResponse( - metadata_jobs=[], + catalog.SearchEntriesResponse( + results=[], next_page_token="def", ), - catalog.ListMetadataJobsResponse( - metadata_jobs=[ - catalog.MetadataJob(), + catalog.SearchEntriesResponse( + results=[ + catalog.SearchEntriesResult(), ], next_page_token="ghi", ), - catalog.ListMetadataJobsResponse( - metadata_jobs=[ - catalog.MetadataJob(), - catalog.MetadataJob(), + catalog.SearchEntriesResponse( + results=[ + catalog.SearchEntriesResult(), + catalog.SearchEntriesResult(), ], ), ) @@ -16450,27 +16823,27 @@ def test_list_metadata_jobs_rest_pager(transport: str = "rest"): response = response + response # Wrap the values into proper Response objs - response = tuple(catalog.ListMetadataJobsResponse.to_json(x) for x in response) + response = tuple(catalog.SearchEntriesResponse.to_json(x) for x in response) return_values = tuple(Response() for i in response) for return_val, response_val in zip(return_values, response): return_val._content = 
response_val.encode("UTF-8") return_val.status_code = 200 req.side_effect = return_values - sample_request = {"parent": "projects/sample1/locations/sample2"} + sample_request = {"name": "projects/sample1/locations/sample2"} - pager = client.list_metadata_jobs(request=sample_request) + pager = client.search_entries(request=sample_request) results = list(pager) assert len(results) == 6 - assert all(isinstance(i, catalog.MetadataJob) for i in results) + assert all(isinstance(i, catalog.SearchEntriesResult) for i in results) - pages = list(client.list_metadata_jobs(request=sample_request).pages) + pages = list(client.search_entries(request=sample_request).pages) for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token -def test_cancel_metadata_job_rest_use_cached_wrapped_rpc(): +def test_create_metadata_job_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -16485,7 +16858,7 @@ def test_cancel_metadata_job_rest_use_cached_wrapped_rpc(): # Ensure method has been cached assert ( - client._transport.cancel_metadata_job in client._transport._wrapped_methods + client._transport.create_metadata_job in client._transport._wrapped_methods ) # Replace cached wrapped function with mock @@ -16494,29 +16867,33 @@ def test_cancel_metadata_job_rest_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.cancel_metadata_job + client._transport.create_metadata_job ] = mock_rpc request = {} - client.cancel_metadata_job(request) + client.create_metadata_job(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.cancel_metadata_job(request) + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.create_metadata_job(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_cancel_metadata_job_rest_required_fields( - request_type=catalog.CancelMetadataJobRequest, +def test_create_metadata_job_rest_required_fields( + request_type=catalog.CreateMetadataJobRequest, ): transport_class = transports.CatalogServiceRestTransport request_init = {} - request_init["name"] = "" + request_init["parent"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -16527,21 +16904,28 @@ def test_cancel_metadata_job_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).cancel_metadata_job._get_unset_required_fields(jsonified_request) + ).create_metadata_job._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["name"] = "name_value" + jsonified_request["parent"] = "parent_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).cancel_metadata_job._get_unset_required_fields(jsonified_request) + ).create_metadata_job._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set( + ( + "metadata_job_id", + "validate_only", + ) + ) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == "name_value" + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" client = CatalogServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -16550,7 +16934,7 @@ def test_cancel_metadata_job_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = None + return_value = operations_pb2.Operation(name="operations/spam") # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -16570,72 +16954,85 @@ def test_cancel_metadata_job_rest_required_fields( response_value = Response() response_value.status_code = 200 - json_return_value = "" + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.cancel_metadata_job(request) + response = client.create_metadata_job(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_cancel_metadata_job_rest_unset_required_fields(): +def test_create_metadata_job_rest_unset_required_fields(): transport = transports.CatalogServiceRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.cancel_metadata_job._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name",))) - - -def test_cancel_metadata_job_rest_flattened(): - client = CatalogServiceClient( - 
credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = None - - # get arguments that satisfy an http rule for this method - sample_request = { - "name": "projects/sample1/locations/sample2/metadataJobs/sample3" - } + unset_fields = transport.create_metadata_job._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "metadataJobId", + "validateOnly", + ) + ) + & set( + ( + "parent", + "metadataJob", + ) + ) + ) + + +def test_create_metadata_job_rest_flattened(): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1/locations/sample2"} # get truthy value for each flattened field mock_args = dict( - name="name_value", + parent="parent_value", + metadata_job=catalog.MetadataJob(name="name_value"), + metadata_job_id="metadata_job_id_value", ) mock_args.update(sample_request) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - json_return_value = "" + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.cancel_metadata_job(**mock_args) + client.create_metadata_job(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{name=projects/*/locations/*/metadataJobs/*}:cancel" + "%s/v1/{parent=projects/*/locations/*}/metadataJobs" % client.transport._host, args[1], ) -def test_cancel_metadata_job_rest_flattened_error(transport: str = "rest"): +def test_create_metadata_job_rest_flattened_error(transport: str = "rest"): client = CatalogServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -16644,1461 +17041,3608 @@ def test_cancel_metadata_job_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.cancel_metadata_job( - catalog.CancelMetadataJobRequest(), - name="name_value", + client.create_metadata_job( + catalog.CreateMetadataJobRequest(), + parent="parent_value", + metadata_job=catalog.MetadataJob(name="name_value"), + metadata_job_id="metadata_job_id_value", ) -def test_credentials_transport_error(): - # It is an error to provide credentials and a transport instance. - transport = transports.CatalogServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): +def test_get_metadata_job_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: client = CatalogServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + transport="rest", ) - # It is an error to provide a credentials file and a transport instance. - transport = transports.CatalogServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = CatalogServiceClient( - client_options={"credentials_file": "credentials.json"}, - transport=transport, - ) + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() - # It is an error to provide an api_key and a transport instance. - transport = transports.CatalogServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - options = client_options.ClientOptions() - options.api_key = "api_key" - with pytest.raises(ValueError): - client = CatalogServiceClient( - client_options=options, - transport=transport, - ) + # Ensure method has been cached + assert client._transport.get_metadata_job in client._transport._wrapped_methods - # It is an error to provide an api_key and a credential. 
- options = client_options.ClientOptions() - options.api_key = "api_key" - with pytest.raises(ValueError): - client = CatalogServiceClient( - client_options=options, credentials=ga_credentials.AnonymousCredentials() + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. ) + client._transport._wrapped_methods[ + client._transport.get_metadata_job + ] = mock_rpc - # It is an error to provide scopes and a transport instance. - transport = transports.CatalogServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = CatalogServiceClient( - client_options={"scopes": ["1", "2"]}, - transport=transport, - ) + request = {} + client.get_metadata_job(request) + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 -def test_transport_instance(): - # A client may be instantiated with a custom transport instance. - transport = transports.CatalogServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - client = CatalogServiceClient(transport=transport) - assert client.transport is transport + client.get_metadata_job(request) + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 -def test_transport_get_channel(): - # A client may be instantiated with a custom transport instance. 
- transport = transports.CatalogServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - channel = transport.grpc_channel - assert channel - transport = transports.CatalogServiceGrpcAsyncIOTransport( - credentials=ga_credentials.AnonymousCredentials(), +def test_get_metadata_job_rest_required_fields( + request_type=catalog.GetMetadataJobRequest, +): + transport_class = transports.CatalogServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) ) - channel = transport.grpc_channel - assert channel + # verify fields with default values are dropped -@pytest.mark.parametrize( - "transport_class", - [ - transports.CatalogServiceGrpcTransport, - transports.CatalogServiceGrpcAsyncIOTransport, - transports.CatalogServiceRestTransport, - ], -) -def test_transport_adc(transport_class): - # Test default credentials are used if not provided. 
- with mock.patch.object(google.auth, "default") as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport_class() - adc.assert_called_once() + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_metadata_job._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + # verify required fields with default values are now present -def test_transport_kind_grpc(): - transport = CatalogServiceClient.get_transport_class("grpc")( + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ) - assert transport.kind == "grpc" + ).get_metadata_job._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" -def test_initialize_client_w_grpc(): client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), transport="grpc" + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", ) - assert client is not None + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = catalog.MetadataJob() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + response_value = Response() + response_value.status_code = 200 -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_create_entry_type_empty_call_grpc(): - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) + # Convert return value to protobuf type + return_value = catalog.MetadataJob.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.create_entry_type), "__call__" - ) as call: - call.return_value = operations_pb2.Operation(name="operations/op") - client.create_entry_type(request=None) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = catalog.CreateEntryTypeRequest() + response = client.get_metadata_job(request) - assert args[0] == request_msg + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. 
-def test_update_entry_type_empty_call_grpc(): - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", +def test_get_metadata_job_rest_unset_required_fields(): + transport = transports.CatalogServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials ) - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.update_entry_type), "__call__" - ) as call: - call.return_value = operations_pb2.Operation(name="operations/op") - client.update_entry_type(request=None) + unset_fields = transport.get_metadata_job._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = catalog.UpdateEntryTypeRequest() - assert args[0] == request_msg - - -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_delete_entry_type_empty_call_grpc(): +def test_get_metadata_job_rest_flattened(): client = CatalogServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", + transport="rest", ) - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.delete_entry_type), "__call__" - ) as call: - call.return_value = operations_pb2.Operation(name="operations/op") - client.delete_entry_type(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = catalog.DeleteEntryTypeRequest() - - assert args[0] == request_msg + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = catalog.MetadataJob() + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/metadataJobs/sample3" + } -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_list_entry_types_empty_call_grpc(): - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) - # Mock the actual call, and fake the request. - with mock.patch.object(type(client.transport.list_entry_types), "__call__") as call: - call.return_value = catalog.ListEntryTypesResponse() - client.list_entry_types(request=None) + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = catalog.MetadataJob.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = catalog.ListEntryTypesRequest() + client.get_metadata_job(**mock_args) - assert args[0] == request_msg + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/metadataJobs/*}" + % client.transport._host, + args[1], + ) -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. 
-def test_get_entry_type_empty_call_grpc(): +def test_get_metadata_job_rest_flattened_error(transport: str = "rest"): client = CatalogServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", + transport=transport, ) - # Mock the actual call, and fake the request. - with mock.patch.object(type(client.transport.get_entry_type), "__call__") as call: - call.return_value = catalog.EntryType() - client.get_entry_type(request=None) + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_metadata_job( + catalog.GetMetadataJobRequest(), + name="name_value", + ) - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = catalog.GetEntryTypeRequest() - assert args[0] == request_msg +def test_list_metadata_jobs_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_create_aspect_type_empty_call_grpc(): - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) + # Ensure method has been cached + assert ( + client._transport.list_metadata_jobs in client._transport._wrapped_methods + ) - # Mock the actual call, and fake the request. 
- with mock.patch.object( - type(client.transport.create_aspect_type), "__call__" - ) as call: - call.return_value = operations_pb2.Operation(name="operations/op") - client.create_aspect_type(request=None) + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.list_metadata_jobs + ] = mock_rpc - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = catalog.CreateAspectTypeRequest() + request = {} + client.list_metadata_jobs(request) - assert args[0] == request_msg + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + client.list_metadata_jobs(request) -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_update_aspect_type_empty_call_grpc(): - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.update_aspect_type), "__call__" - ) as call: - call.return_value = operations_pb2.Operation(name="operations/op") - client.update_aspect_type(request=None) - # Establish that the underlying stub method was called. 
- call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = catalog.UpdateAspectTypeRequest() +def test_list_metadata_jobs_rest_required_fields( + request_type=catalog.ListMetadataJobsRequest, +): + transport_class = transports.CatalogServiceRestTransport - assert args[0] == request_msg + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + # verify fields with default values are dropped -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_delete_aspect_type_empty_call_grpc(): - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_metadata_jobs._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.delete_aspect_type), "__call__" - ) as call: - call.return_value = operations_pb2.Operation(name="operations/op") - client.delete_aspect_type(request=None) + # verify required fields with default values are now present - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = catalog.DeleteAspectTypeRequest() + jsonified_request["parent"] = "parent_value" - assert args[0] == request_msg + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_metadata_jobs._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set( + ( + "filter", + "order_by", + "page_size", + "page_token", + ) + ) + jsonified_request.update(unset_fields) + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_list_aspect_types_empty_call_grpc(): client = CatalogServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", + transport="rest", ) + request = request_type(**request_init) - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_aspect_types), "__call__" - ) as call: - call.return_value = catalog.ListAspectTypesResponse() - client.list_aspect_types(request=None) + # Designate an appropriate value for the returned response. + return_value = catalog.ListMetadataJobsResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result - # Establish that the underlying stub method was called. 
- call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = catalog.ListAspectTypesRequest() + response_value = Response() + response_value.status_code = 200 - assert args[0] == request_msg + # Convert return value to protobuf type + return_value = catalog.ListMetadataJobsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_get_aspect_type_empty_call_grpc(): - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) + response = client.list_metadata_jobs(request) - # Mock the actual call, and fake the request. - with mock.patch.object(type(client.transport.get_aspect_type), "__call__") as call: - call.return_value = catalog.AspectType() - client.get_aspect_type(request=None) + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = catalog.GetAspectTypeRequest() - assert args[0] == request_msg +def test_list_metadata_jobs_rest_unset_required_fields(): + transport = transports.CatalogServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.list_metadata_jobs._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "filter", + "orderBy", + "pageSize", + "pageToken", + ) + ) + & set(("parent",)) + ) -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. 
-def test_create_entry_group_empty_call_grpc(): +def test_list_metadata_jobs_rest_flattened(): client = CatalogServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", + transport="rest", ) - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.create_entry_group), "__call__" - ) as call: - call.return_value = operations_pb2.Operation(name="operations/op") - client.create_entry_group(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = catalog.CreateEntryGroupRequest() - - assert args[0] == request_msg + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = catalog.ListMetadataJobsResponse() + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1/locations/sample2"} -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_update_entry_group_empty_call_grpc(): - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + ) + mock_args.update(sample_request) - # Mock the actual call, and fake the request. 
- with mock.patch.object( - type(client.transport.update_entry_group), "__call__" - ) as call: - call.return_value = operations_pb2.Operation(name="operations/op") - client.update_entry_group(request=None) + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = catalog.ListMetadataJobsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = catalog.UpdateEntryGroupRequest() + client.list_metadata_jobs(**mock_args) - assert args[0] == request_msg + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*}/metadataJobs" + % client.transport._host, + args[1], + ) -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_delete_entry_group_empty_call_grpc(): +def test_list_metadata_jobs_rest_flattened_error(transport: str = "rest"): client = CatalogServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", + transport=transport, ) - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.delete_entry_group), "__call__" - ) as call: - call.return_value = operations_pb2.Operation(name="operations/op") - client.delete_entry_group(request=None) - - # Establish that the underlying stub method was called. 
- call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = catalog.DeleteEntryGroupRequest() - - assert args[0] == request_msg + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_metadata_jobs( + catalog.ListMetadataJobsRequest(), + parent="parent_value", + ) -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_list_entry_groups_empty_call_grpc(): +def test_list_metadata_jobs_rest_pager(transport: str = "rest"): client = CatalogServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", + transport=transport, ) - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_entry_groups), "__call__" - ) as call: - call.return_value = catalog.ListEntryGroupsResponse() - client.list_entry_groups(request=None) + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. + # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + catalog.ListMetadataJobsResponse( + metadata_jobs=[ + catalog.MetadataJob(), + catalog.MetadataJob(), + catalog.MetadataJob(), + ], + next_page_token="abc", + ), + catalog.ListMetadataJobsResponse( + metadata_jobs=[], + next_page_token="def", + ), + catalog.ListMetadataJobsResponse( + metadata_jobs=[ + catalog.MetadataJob(), + ], + next_page_token="ghi", + ), + catalog.ListMetadataJobsResponse( + metadata_jobs=[ + catalog.MetadataJob(), + catalog.MetadataJob(), + ], + ), + ) + # Two responses for two calls + response = response + response - # Establish that the underlying stub method was called. 
- call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = catalog.ListEntryGroupsRequest() + # Wrap the values into proper Response objs + response = tuple(catalog.ListMetadataJobsResponse.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values - assert args[0] == request_msg + sample_request = {"parent": "projects/sample1/locations/sample2"} + pager = client.list_metadata_jobs(request=sample_request) -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_get_entry_group_empty_call_grpc(): - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, catalog.MetadataJob) for i in results) - # Mock the actual call, and fake the request. - with mock.patch.object(type(client.transport.get_entry_group), "__call__") as call: - call.return_value = catalog.EntryGroup() - client.get_entry_group(request=None) + pages = list(client.list_metadata_jobs(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token - # Establish that the underlying stub method was called. 
- call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = catalog.GetEntryGroupRequest() - assert args[0] == request_msg +def test_cancel_metadata_job_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_create_entry_empty_call_grpc(): - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) + # Ensure method has been cached + assert ( + client._transport.cancel_metadata_job in client._transport._wrapped_methods + ) - # Mock the actual call, and fake the request. - with mock.patch.object(type(client.transport.create_entry), "__call__") as call: - call.return_value = catalog.Entry() - client.create_entry(request=None) + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.cancel_metadata_job + ] = mock_rpc - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = catalog.CreateEntryRequest() + request = {} + client.cancel_metadata_job(request) - assert args[0] == request_msg + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + client.cancel_metadata_job(request) -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. 
request == None and no flattened fields passed, work. -def test_update_entry_empty_call_grpc(): - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 - # Mock the actual call, and fake the request. - with mock.patch.object(type(client.transport.update_entry), "__call__") as call: - call.return_value = catalog.Entry() - client.update_entry(request=None) - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = catalog.UpdateEntryRequest() +def test_cancel_metadata_job_rest_required_fields( + request_type=catalog.CancelMetadataJobRequest, +): + transport_class = transports.CatalogServiceRestTransport - assert args[0] == request_msg + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + # verify fields with default values are dropped -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_delete_entry_empty_call_grpc(): - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).cancel_metadata_job._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) - # Mock the actual call, and fake the request. - with mock.patch.object(type(client.transport.delete_entry), "__call__") as call: - call.return_value = catalog.Entry() - client.delete_entry(request=None) + # verify required fields with default values are now present - # Establish that the underlying stub method was called. 
- call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = catalog.DeleteEntryRequest() + jsonified_request["name"] = "name_value" - assert args[0] == request_msg + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).cancel_metadata_job._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_list_entries_empty_call_grpc(): client = CatalogServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", + transport="rest", ) + request = request_type(**request_init) - # Mock the actual call, and fake the request. - with mock.patch.object(type(client.transport.list_entries), "__call__") as call: - call.return_value = catalog.ListEntriesResponse() - client.list_entries(request=None) + # Designate an appropriate value for the returned response. + return_value = None + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result - # Establish that the underlying stub method was called. 
- call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = catalog.ListEntriesRequest() + response_value = Response() + response_value.status_code = 200 + json_return_value = "" - assert args[0] == request_msg + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.cancel_metadata_job(request) -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_get_entry_empty_call_grpc(): - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params - # Mock the actual call, and fake the request. - with mock.patch.object(type(client.transport.get_entry), "__call__") as call: - call.return_value = catalog.Entry() - client.get_entry(request=None) - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = catalog.GetEntryRequest() +def test_cancel_metadata_job_rest_unset_required_fields(): + transport = transports.CatalogServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) - assert args[0] == request_msg + unset_fields = transport.cancel_metadata_job._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_lookup_entry_empty_call_grpc(): +def test_cancel_metadata_job_rest_flattened(): client = CatalogServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", + transport="rest", ) - # Mock the actual call, and fake the request. 
- with mock.patch.object(type(client.transport.lookup_entry), "__call__") as call: - call.return_value = catalog.Entry() - client.lookup_entry(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = catalog.LookupEntryRequest() - - assert args[0] == request_msg + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = None + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/metadataJobs/sample3" + } -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_search_entries_empty_call_grpc(): - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) - # Mock the actual call, and fake the request. - with mock.patch.object(type(client.transport.search_entries), "__call__") as call: - call.return_value = catalog.SearchEntriesResponse() - client.search_entries(request=None) + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "" + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - # Establish that the underlying stub method was called. 
- call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = catalog.SearchEntriesRequest() + client.cancel_metadata_job(**mock_args) - assert args[0] == request_msg + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/metadataJobs/*}:cancel" + % client.transport._host, + args[1], + ) -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_create_metadata_job_empty_call_grpc(): +def test_cancel_metadata_job_rest_flattened_error(transport: str = "rest"): client = CatalogServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", + transport=transport, ) - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.create_metadata_job), "__call__" - ) as call: - call.return_value = operations_pb2.Operation(name="operations/op") - client.create_metadata_job(request=None) + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.cancel_metadata_job( + catalog.CancelMetadataJobRequest(), + name="name_value", + ) - # Establish that the underlying stub method was called. 
- call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = catalog.CreateMetadataJobRequest() - assert args[0] == request_msg +def test_create_entry_link_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_get_metadata_job_empty_call_grpc(): - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) + # Ensure method has been cached + assert client._transport.create_entry_link in client._transport._wrapped_methods - # Mock the actual call, and fake the request. - with mock.patch.object(type(client.transport.get_metadata_job), "__call__") as call: - call.return_value = catalog.MetadataJob() - client.get_metadata_job(request=None) + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.create_entry_link + ] = mock_rpc - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = catalog.GetMetadataJobRequest() + request = {} + client.create_entry_link(request) - assert args[0] == request_msg + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + client.create_entry_link(request) -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. 
request == None and no flattened fields passed, work. -def test_list_metadata_jobs_empty_call_grpc(): - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_metadata_jobs), "__call__" - ) as call: - call.return_value = catalog.ListMetadataJobsResponse() - client.list_metadata_jobs(request=None) - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = catalog.ListMetadataJobsRequest() +def test_create_entry_link_rest_required_fields( + request_type=catalog.CreateEntryLinkRequest, +): + transport_class = transports.CatalogServiceRestTransport - assert args[0] == request_msg + request_init = {} + request_init["parent"] = "" + request_init["entry_link_id"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + # verify fields with default values are dropped + assert "entryLinkId" not in jsonified_request + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_entry_link._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + assert "entryLinkId" in jsonified_request + assert jsonified_request["entryLinkId"] == request_init["entry_link_id"] + + jsonified_request["parent"] = "parent_value" + jsonified_request["entryLinkId"] = "entry_link_id_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_entry_link._get_unset_required_fields(jsonified_request) + # Check that path parameters and 
body parameters are not mixing in. + assert not set(unset_fields) - set(("entry_link_id",)) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + assert "entryLinkId" in jsonified_request + assert jsonified_request["entryLinkId"] == "entry_link_id_value" -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_cancel_metadata_job_empty_call_grpc(): client = CatalogServiceClient( credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", + transport="rest", ) + request = request_type(**request_init) - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.cancel_metadata_job), "__call__" - ) as call: - call.return_value = None - client.cancel_metadata_job(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = catalog.CancelMetadataJobRequest() + # Designate an appropriate value for the returned response. + return_value = catalog.EntryLink() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result - assert args[0] == request_msg + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = catalog.EntryLink.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) -def test_transport_kind_grpc_asyncio(): - transport = CatalogServiceAsyncClient.get_transport_class("grpc_asyncio")( - credentials=async_anonymous_credentials() - ) - assert transport.kind == "grpc_asyncio" + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.create_entry_link(request) -def test_initialize_client_w_grpc_asyncio(): - client = CatalogServiceAsyncClient( - credentials=async_anonymous_credentials(), transport="grpc_asyncio" - ) - assert client is not None + expected_params = [ + ( + "entryLinkId", + "", + ), + ("$alt", "json;enum-encoding=int"), + ] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_create_entry_type_empty_call_grpc_asyncio(): - client = CatalogServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", +def test_create_entry_link_rest_unset_required_fields(): + transport = transports.CatalogServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials ) - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.create_entry_type), "__call__" - ) as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name="operations/spam") + unset_fields = transport.create_entry_link._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(("entryLinkId",)) + & set( + ( + "parent", + "entryLinkId", + "entryLink", + ) ) - await client.create_entry_type(request=None) + ) - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = catalog.CreateEntryTypeRequest() - assert args[0] == request_msg +def test_create_entry_link_rest_flattened(): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = catalog.EntryLink() -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_update_entry_type_empty_call_grpc_asyncio(): - client = CatalogServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) + # get arguments that satisfy an http rule for this method + sample_request = { + "parent": "projects/sample1/locations/sample2/entryGroups/sample3" + } - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.update_entry_type), "__call__" - ) as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name="operations/spam") + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + entry_link=catalog.EntryLink(name="name_value"), + entry_link_id="entry_link_id_value", ) - await client.update_entry_type(request=None) + mock_args.update(sample_request) - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = catalog.UpdateEntryTypeRequest() + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = catalog.EntryLink.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - assert args[0] == request_msg + client.create_entry_link(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*/entryGroups/*}/entryLinks" + % client.transport._host, + args[1], + ) -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_delete_entry_type_empty_call_grpc_asyncio(): - client = CatalogServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", +def test_create_entry_link_rest_flattened_error(transport: str = "rest"): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) - # Mock the actual call, and fake the request. 
- with mock.patch.object( - type(client.transport.delete_entry_type), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name="operations/spam") + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.create_entry_link( + catalog.CreateEntryLinkRequest(), + parent="parent_value", + entry_link=catalog.EntryLink(name="name_value"), + entry_link_id="entry_link_id_value", ) - await client.delete_entry_type(request=None) - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = catalog.DeleteEntryTypeRequest() - assert args[0] == request_msg +def test_delete_entry_link_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_list_entry_types_empty_call_grpc_asyncio(): - client = CatalogServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) + # Ensure method has been cached + assert client._transport.delete_entry_link in client._transport._wrapped_methods - # Mock the actual call, and fake the request. - with mock.patch.object(type(client.transport.list_entry_types), "__call__") as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - catalog.ListEntryTypesResponse( - next_page_token="next_page_token_value", - unreachable_locations=["unreachable_locations_value"], - ) + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. ) - await client.list_entry_types(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = catalog.ListEntryTypesRequest() + client._transport._wrapped_methods[ + client._transport.delete_entry_link + ] = mock_rpc - assert args[0] == request_msg + request = {} + client.delete_entry_link(request) + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_get_entry_type_empty_call_grpc_asyncio(): - client = CatalogServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) + client.delete_entry_link(request) - # Mock the actual call, and fake the request. - with mock.patch.object(type(client.transport.get_entry_type), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - catalog.EntryType( - name="name_value", - uid="uid_value", - description="description_value", - display_name="display_name_value", - etag="etag_value", - type_aliases=["type_aliases_value"], - platform="platform_value", - system="system_value", - ) - ) - await client.get_entry_type(request=None) - - # Establish that the underlying stub method was called. 
- call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = catalog.GetEntryTypeRequest() + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 - assert args[0] == request_msg +def test_delete_entry_link_rest_required_fields( + request_type=catalog.DeleteEntryLinkRequest, +): + transport_class = transports.CatalogServiceRestTransport -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_create_aspect_type_empty_call_grpc_asyncio(): - client = CatalogServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) ) - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.create_aspect_type), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name="operations/spam") - ) - await client.create_aspect_type(request=None) + # verify fields with default values are dropped - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = catalog.CreateAspectTypeRequest() + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_entry_link._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) - assert args[0] == request_msg + # verify required fields with default values are now present + jsonified_request["name"] = "name_value" -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. 
request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_update_aspect_type_empty_call_grpc_asyncio(): - client = CatalogServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_entry_link._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.update_aspect_type), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name="operations/spam") - ) - await client.update_aspect_type(request=None) + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = catalog.UpdateAspectTypeRequest() + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) - assert args[0] == request_msg + # Designate an appropriate value for the returned response. + return_value = catalog.EntryLink() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "delete", + "query_params": pb_request, + } + transcode.return_value = transcode_result + response_value = Response() + response_value.status_code = 200 -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_delete_aspect_type_empty_call_grpc_asyncio(): - client = CatalogServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) + # Convert return value to protobuf type + return_value = catalog.EntryLink.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.delete_aspect_type), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name="operations/spam") - ) - await client.delete_aspect_type(request=None) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = catalog.DeleteAspectTypeRequest() + response = client.delete_entry_link(request) - assert args[0] == request_msg + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. 
-@pytest.mark.asyncio -async def test_list_aspect_types_empty_call_grpc_asyncio(): - client = CatalogServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", +def test_delete_entry_link_rest_unset_required_fields(): + transport = transports.CatalogServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials ) - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_aspect_types), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - catalog.ListAspectTypesResponse( - next_page_token="next_page_token_value", - unreachable_locations=["unreachable_locations_value"], - ) - ) - await client.list_aspect_types(request=None) + unset_fields = transport.delete_entry_link._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = catalog.ListAspectTypesRequest() - assert args[0] == request_msg +def test_delete_entry_link_rest_flattened(): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = catalog.EntryLink() -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. 
-@pytest.mark.asyncio -async def test_get_aspect_type_empty_call_grpc_asyncio(): - client = CatalogServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/entryGroups/sample3/entryLinks/sample4" + } - # Mock the actual call, and fake the request. - with mock.patch.object(type(client.transport.get_aspect_type), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - catalog.AspectType( - name="name_value", - uid="uid_value", - description="description_value", - display_name="display_name_value", - etag="etag_value", - transfer_status=catalog.TransferStatus.TRANSFER_STATUS_MIGRATED, - ) + # get truthy value for each flattened field + mock_args = dict( + name="name_value", ) - await client.get_aspect_type(request=None) + mock_args.update(sample_request) - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = catalog.GetAspectTypeRequest() + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = catalog.EntryLink.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - assert args[0] == request_msg + client.delete_entry_link(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/entryGroups/*/entryLinks/*}" + % client.transport._host, + args[1], + ) -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_create_entry_group_empty_call_grpc_asyncio(): - client = CatalogServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", +def test_delete_entry_link_rest_flattened_error(transport: str = "rest"): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.create_entry_group), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name="operations/spam") + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_entry_link( + catalog.DeleteEntryLinkRequest(), + name="name_value", ) - await client.create_entry_group(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = catalog.CreateEntryGroupRequest() - - assert args[0] == request_msg -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_update_entry_group_empty_call_grpc_asyncio(): - client = CatalogServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. 
- with mock.patch.object( - type(client.transport.update_entry_group), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name="operations/spam") +def test_get_entry_link_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", ) - await client.update_entry_group(request=None) - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = catalog.UpdateEntryGroupRequest() + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() - assert args[0] == request_msg + # Ensure method has been cached + assert client._transport.get_entry_link in client._transport._wrapped_methods + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.get_entry_link] = mock_rpc -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_delete_entry_group_empty_call_grpc_asyncio(): - client = CatalogServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) + request = {} + client.get_entry_link(request) - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.delete_entry_group), "__call__" - ) as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name="operations/spam") - ) - await client.delete_entry_group(request=None) + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = catalog.DeleteEntryGroupRequest() + client.get_entry_link(request) - assert args[0] == request_msg + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_list_entry_groups_empty_call_grpc_asyncio(): - client = CatalogServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", +def test_get_entry_link_rest_required_fields(request_type=catalog.GetEntryLinkRequest): + transport_class = transports.CatalogServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) ) - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_entry_groups), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - catalog.ListEntryGroupsResponse( - next_page_token="next_page_token_value", - unreachable_locations=["unreachable_locations_value"], - ) - ) - await client.list_entry_groups(request=None) + # verify fields with default values are dropped - # Establish that the underlying stub method was called. 
- call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = catalog.ListEntryGroupsRequest() + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_entry_link._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) - assert args[0] == request_msg + # verify required fields with default values are now present + jsonified_request["name"] = "name_value" -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_get_entry_group_empty_call_grpc_asyncio(): - client = CatalogServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_entry_link._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) - # Mock the actual call, and fake the request. - with mock.patch.object(type(client.transport.get_entry_group), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - catalog.EntryGroup( - name="name_value", - uid="uid_value", - description="description_value", - display_name="display_name_value", - etag="etag_value", - transfer_status=catalog.TransferStatus.TRANSFER_STATUS_MIGRATED, - ) - ) - await client.get_entry_group(request=None) + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" - # Establish that the underlying stub method was called. 
- call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = catalog.GetEntryGroupRequest() + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) - assert args[0] == request_msg + # Designate an appropriate value for the returned response. + return_value = catalog.EntryLink() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + response_value = Response() + response_value.status_code = 200 -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_create_entry_empty_call_grpc_asyncio(): - client = CatalogServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) + # Convert return value to protobuf type + return_value = catalog.EntryLink.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) - # Mock the actual call, and fake the request. - with mock.patch.object(type(client.transport.create_entry), "__call__") as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - catalog.Entry( - name="name_value", - entry_type="entry_type_value", - parent_entry="parent_entry_value", - fully_qualified_name="fully_qualified_name_value", - ) - ) - await client.create_entry(request=None) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = catalog.CreateEntryRequest() + response = client.get_entry_link(request) - assert args[0] == request_msg + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_update_entry_empty_call_grpc_asyncio(): - client = CatalogServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", +def test_get_entry_link_rest_unset_required_fields(): + transport = transports.CatalogServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials ) - # Mock the actual call, and fake the request. - with mock.patch.object(type(client.transport.update_entry), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - catalog.Entry( - name="name_value", - entry_type="entry_type_value", - parent_entry="parent_entry_value", - fully_qualified_name="fully_qualified_name_value", - ) - ) - await client.update_entry(request=None) + unset_fields = transport.get_entry_link._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) - # Establish that the underlying stub method was called. 
- call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = catalog.UpdateEntryRequest() - assert args[0] == request_msg +def test_get_entry_link_rest_flattened(): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = catalog.EntryLink() -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_delete_entry_empty_call_grpc_asyncio(): - client = CatalogServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/entryGroups/sample3/entryLinks/sample4" + } - # Mock the actual call, and fake the request. - with mock.patch.object(type(client.transport.delete_entry), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - catalog.Entry( - name="name_value", - entry_type="entry_type_value", - parent_entry="parent_entry_value", - fully_qualified_name="fully_qualified_name_value", - ) + # get truthy value for each flattened field + mock_args = dict( + name="name_value", ) - await client.delete_entry(request=None) - - # Establish that the underlying stub method was called. 
- call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = catalog.DeleteEntryRequest() + mock_args.update(sample_request) - assert args[0] == request_msg + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = catalog.EntryLink.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.get_entry_link(**mock_args) -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_list_entries_empty_call_grpc_asyncio(): - client = CatalogServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/entryGroups/*/entryLinks/*}" + % client.transport._host, + args[1], + ) + + +def test_get_entry_link_rest_flattened_error(transport: str = "rest"): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) - # Mock the actual call, and fake the request. - with mock.patch.object(type(client.transport.list_entries), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - catalog.ListEntriesResponse( - next_page_token="next_page_token_value", - ) + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.get_entry_link( + catalog.GetEntryLinkRequest(), + name="name_value", ) - await client.list_entries(request=None) - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = catalog.ListEntriesRequest() - assert args[0] == request_msg +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. + transport = transports.CatalogServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + # It is an error to provide a credentials file and a transport instance. + transport = transports.CatalogServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = CatalogServiceClient( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_get_entry_empty_call_grpc_asyncio(): - client = CatalogServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", + # It is an error to provide an api_key and a transport instance. + transport = transports.CatalogServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), ) + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = CatalogServiceClient( + client_options=options, + transport=transport, + ) - # Mock the actual call, and fake the request. - with mock.patch.object(type(client.transport.get_entry), "__call__") as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - catalog.Entry( - name="name_value", - entry_type="entry_type_value", - parent_entry="parent_entry_value", - fully_qualified_name="fully_qualified_name_value", - ) + # It is an error to provide an api_key and a credential. + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = CatalogServiceClient( + client_options=options, credentials=ga_credentials.AnonymousCredentials() ) - await client.get_entry(request=None) - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = catalog.GetEntryRequest() + # It is an error to provide scopes and a transport instance. + transport = transports.CatalogServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = CatalogServiceClient( + client_options={"scopes": ["1", "2"]}, + transport=transport, + ) - assert args[0] == request_msg +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. + transport = transports.CatalogServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + client = CatalogServiceClient(transport=transport) + assert client.transport is transport -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_lookup_entry_empty_call_grpc_asyncio(): - client = CatalogServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", + +def test_transport_get_channel(): + # A client may be instantiated with a custom transport instance. + transport = transports.CatalogServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), ) + channel = transport.grpc_channel + assert channel - # Mock the actual call, and fake the request. 
- with mock.patch.object(type(client.transport.lookup_entry), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - catalog.Entry( - name="name_value", - entry_type="entry_type_value", - parent_entry="parent_entry_value", - fully_qualified_name="fully_qualified_name_value", - ) - ) - await client.lookup_entry(request=None) + transport = transports.CatalogServiceGrpcAsyncIOTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = catalog.LookupEntryRequest() - assert args[0] == request_msg +@pytest.mark.parametrize( + "transport_class", + [ + transports.CatalogServiceGrpcTransport, + transports.CatalogServiceGrpcAsyncIOTransport, + transports.CatalogServiceRestTransport, + ], +) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. + with mock.patch.object(google.auth, "default") as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() + + +def test_transport_kind_grpc(): + transport = CatalogServiceClient.get_transport_class("grpc")( + credentials=ga_credentials.AnonymousCredentials() + ) + assert transport.kind == "grpc" + + +def test_initialize_client_w_grpc(): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="grpc" + ) + assert client is not None # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
-@pytest.mark.asyncio -async def test_search_entries_empty_call_grpc_asyncio(): - client = CatalogServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", +def test_create_entry_type_empty_call_grpc(): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", ) # Mock the actual call, and fake the request. - with mock.patch.object(type(client.transport.search_entries), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - catalog.SearchEntriesResponse( - total_size=1086, - next_page_token="next_page_token_value", - unreachable=["unreachable_value"], - ) - ) - await client.search_entries(request=None) + with mock.patch.object( + type(client.transport.create_entry_type), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.create_entry_type(request=None) # Establish that the underlying stub method was called. call.assert_called() _, args, _ = call.mock_calls[0] - request_msg = catalog.SearchEntriesRequest() + request_msg = catalog.CreateEntryTypeRequest() assert args[0] == request_msg # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_create_metadata_job_empty_call_grpc_asyncio(): - client = CatalogServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", +def test_update_entry_type_empty_call_grpc(): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", ) # Mock the actual call, and fake the request. with mock.patch.object( - type(client.transport.create_metadata_job), "__call__" + type(client.transport.update_entry_type), "__call__" ) as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name="operations/spam") - ) - await client.create_metadata_job(request=None) + call.return_value = operations_pb2.Operation(name="operations/op") + client.update_entry_type(request=None) # Establish that the underlying stub method was called. call.assert_called() _, args, _ = call.mock_calls[0] - request_msg = catalog.CreateMetadataJobRequest() + request_msg = catalog.UpdateEntryTypeRequest() assert args[0] == request_msg # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_get_metadata_job_empty_call_grpc_asyncio(): - client = CatalogServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", +def test_delete_entry_type_empty_call_grpc(): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", ) # Mock the actual call, and fake the request. - with mock.patch.object(type(client.transport.get_metadata_job), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - catalog.MetadataJob( - name="name_value", - uid="uid_value", - type_=catalog.MetadataJob.Type.IMPORT, - ) - ) - await client.get_metadata_job(request=None) + with mock.patch.object( + type(client.transport.delete_entry_type), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.delete_entry_type(request=None) # Establish that the underlying stub method was called. call.assert_called() _, args, _ = call.mock_calls[0] - request_msg = catalog.GetMetadataJobRequest() + request_msg = catalog.DeleteEntryTypeRequest() assert args[0] == request_msg # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
-@pytest.mark.asyncio -async def test_list_metadata_jobs_empty_call_grpc_asyncio(): - client = CatalogServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", +def test_list_entry_types_empty_call_grpc(): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.list_entry_types), "__call__") as call: + call.return_value = catalog.ListEntryTypesResponse() + client.list_entry_types(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = catalog.ListEntryTypesRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_get_entry_type_empty_call_grpc(): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.get_entry_type), "__call__") as call: + call.return_value = catalog.EntryType() + client.get_entry_type(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = catalog.GetEntryTypeRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_create_aspect_type_empty_call_grpc(): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", ) # Mock the actual call, and fake the request. 
with mock.patch.object( - type(client.transport.list_metadata_jobs), "__call__" + type(client.transport.create_aspect_type), "__call__" ) as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - catalog.ListMetadataJobsResponse( - next_page_token="next_page_token_value", - unreachable_locations=["unreachable_locations_value"], - ) - ) - await client.list_metadata_jobs(request=None) + call.return_value = operations_pb2.Operation(name="operations/op") + client.create_aspect_type(request=None) # Establish that the underlying stub method was called. call.assert_called() _, args, _ = call.mock_calls[0] - request_msg = catalog.ListMetadataJobsRequest() + request_msg = catalog.CreateAspectTypeRequest() assert args[0] == request_msg # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_cancel_metadata_job_empty_call_grpc_asyncio(): - client = CatalogServiceAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", +def test_update_aspect_type_empty_call_grpc(): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", ) # Mock the actual call, and fake the request. with mock.patch.object( - type(client.transport.cancel_metadata_job), "__call__" + type(client.transport.update_aspect_type), "__call__" ) as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - await client.cancel_metadata_job(request=None) + call.return_value = operations_pb2.Operation(name="operations/op") + client.update_aspect_type(request=None) # Establish that the underlying stub method was called. 
call.assert_called() _, args, _ = call.mock_calls[0] - request_msg = catalog.CancelMetadataJobRequest() + request_msg = catalog.UpdateAspectTypeRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_delete_aspect_type_empty_call_grpc(): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.delete_aspect_type), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.delete_aspect_type(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = catalog.DeleteAspectTypeRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_list_aspect_types_empty_call_grpc(): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_aspect_types), "__call__" + ) as call: + call.return_value = catalog.ListAspectTypesResponse() + client.list_aspect_types(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = catalog.ListAspectTypesRequest() assert args[0] == request_msg -def test_transport_kind_rest(): - transport = CatalogServiceClient.get_transport_class("rest")( - credentials=ga_credentials.AnonymousCredentials() - ) - assert transport.kind == "rest" +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. 
request == None and no flattened fields passed, work. +def test_get_aspect_type_empty_call_grpc(): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.get_aspect_type), "__call__") as call: + call.return_value = catalog.AspectType() + client.get_aspect_type(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = catalog.GetAspectTypeRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_create_entry_group_empty_call_grpc(): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.create_entry_group), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.create_entry_group(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = catalog.CreateEntryGroupRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_update_entry_group_empty_call_grpc(): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.update_entry_group), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.update_entry_group(request=None) + + # Establish that the underlying stub method was called. 
+ call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = catalog.UpdateEntryGroupRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_delete_entry_group_empty_call_grpc(): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.delete_entry_group), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.delete_entry_group(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = catalog.DeleteEntryGroupRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_list_entry_groups_empty_call_grpc(): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_entry_groups), "__call__" + ) as call: + call.return_value = catalog.ListEntryGroupsResponse() + client.list_entry_groups(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = catalog.ListEntryGroupsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_get_entry_group_empty_call_grpc(): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. 
+ with mock.patch.object(type(client.transport.get_entry_group), "__call__") as call: + call.return_value = catalog.EntryGroup() + client.get_entry_group(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = catalog.GetEntryGroupRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_create_entry_empty_call_grpc(): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.create_entry), "__call__") as call: + call.return_value = catalog.Entry() + client.create_entry(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = catalog.CreateEntryRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_update_entry_empty_call_grpc(): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.update_entry), "__call__") as call: + call.return_value = catalog.Entry() + client.update_entry(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = catalog.UpdateEntryRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. 
+def test_delete_entry_empty_call_grpc(): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.delete_entry), "__call__") as call: + call.return_value = catalog.Entry() + client.delete_entry(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = catalog.DeleteEntryRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_list_entries_empty_call_grpc(): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.list_entries), "__call__") as call: + call.return_value = catalog.ListEntriesResponse() + client.list_entries(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = catalog.ListEntriesRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_get_entry_empty_call_grpc(): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.get_entry), "__call__") as call: + call.return_value = catalog.Entry() + client.get_entry(request=None) + + # Establish that the underlying stub method was called. 
+ call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = catalog.GetEntryRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_lookup_entry_empty_call_grpc(): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.lookup_entry), "__call__") as call: + call.return_value = catalog.Entry() + client.lookup_entry(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = catalog.LookupEntryRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_search_entries_empty_call_grpc(): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.search_entries), "__call__") as call: + call.return_value = catalog.SearchEntriesResponse() + client.search_entries(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = catalog.SearchEntriesRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_create_metadata_job_empty_call_grpc(): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. 
+ with mock.patch.object( + type(client.transport.create_metadata_job), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.create_metadata_job(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = catalog.CreateMetadataJobRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_get_metadata_job_empty_call_grpc(): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.get_metadata_job), "__call__") as call: + call.return_value = catalog.MetadataJob() + client.get_metadata_job(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = catalog.GetMetadataJobRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_list_metadata_jobs_empty_call_grpc(): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_metadata_jobs), "__call__" + ) as call: + call.return_value = catalog.ListMetadataJobsResponse() + client.list_metadata_jobs(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = catalog.ListMetadataJobsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. 
request == None and no flattened fields passed, work. +def test_cancel_metadata_job_empty_call_grpc(): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.cancel_metadata_job), "__call__" + ) as call: + call.return_value = None + client.cancel_metadata_job(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = catalog.CancelMetadataJobRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_create_entry_link_empty_call_grpc(): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.create_entry_link), "__call__" + ) as call: + call.return_value = catalog.EntryLink() + client.create_entry_link(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = catalog.CreateEntryLinkRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_delete_entry_link_empty_call_grpc(): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.delete_entry_link), "__call__" + ) as call: + call.return_value = catalog.EntryLink() + client.delete_entry_link(request=None) + + # Establish that the underlying stub method was called. 
+ call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = catalog.DeleteEntryLinkRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_get_entry_link_empty_call_grpc(): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.get_entry_link), "__call__") as call: + call.return_value = catalog.EntryLink() + client.get_entry_link(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = catalog.GetEntryLinkRequest() + + assert args[0] == request_msg + + +def test_transport_kind_grpc_asyncio(): + transport = CatalogServiceAsyncClient.get_transport_class("grpc_asyncio")( + credentials=async_anonymous_credentials() + ) + assert transport.kind == "grpc_asyncio" + + +def test_initialize_client_w_grpc_asyncio(): + client = CatalogServiceAsyncClient( + credentials=async_anonymous_credentials(), transport="grpc_asyncio" + ) + assert client is not None + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_create_entry_type_empty_call_grpc_asyncio(): + client = CatalogServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.create_entry_type), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + await client.create_entry_type(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = catalog.CreateEntryTypeRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_update_entry_type_empty_call_grpc_asyncio(): + client = CatalogServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.update_entry_type), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + await client.update_entry_type(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = catalog.UpdateEntryTypeRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_delete_entry_type_empty_call_grpc_asyncio(): + client = CatalogServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.delete_entry_type), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + await client.delete_entry_type(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = catalog.DeleteEntryTypeRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_list_entry_types_empty_call_grpc_asyncio(): + client = CatalogServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.list_entry_types), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + catalog.ListEntryTypesResponse( + next_page_token="next_page_token_value", + unreachable_locations=["unreachable_locations_value"], + ) + ) + await client.list_entry_types(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = catalog.ListEntryTypesRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_get_entry_type_empty_call_grpc_asyncio(): + client = CatalogServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.get_entry_type), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + catalog.EntryType( + name="name_value", + uid="uid_value", + description="description_value", + display_name="display_name_value", + etag="etag_value", + type_aliases=["type_aliases_value"], + platform="platform_value", + system="system_value", + ) + ) + await client.get_entry_type(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = catalog.GetEntryTypeRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_create_aspect_type_empty_call_grpc_asyncio(): + client = CatalogServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.create_aspect_type), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + await client.create_aspect_type(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = catalog.CreateAspectTypeRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_update_aspect_type_empty_call_grpc_asyncio(): + client = CatalogServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. 
+ with mock.patch.object( + type(client.transport.update_aspect_type), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + await client.update_aspect_type(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = catalog.UpdateAspectTypeRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_delete_aspect_type_empty_call_grpc_asyncio(): + client = CatalogServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.delete_aspect_type), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + await client.delete_aspect_type(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = catalog.DeleteAspectTypeRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_list_aspect_types_empty_call_grpc_asyncio(): + client = CatalogServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_aspect_types), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + catalog.ListAspectTypesResponse( + next_page_token="next_page_token_value", + unreachable_locations=["unreachable_locations_value"], + ) + ) + await client.list_aspect_types(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = catalog.ListAspectTypesRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_get_aspect_type_empty_call_grpc_asyncio(): + client = CatalogServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.get_aspect_type), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + catalog.AspectType( + name="name_value", + uid="uid_value", + description="description_value", + display_name="display_name_value", + etag="etag_value", + transfer_status=catalog.TransferStatus.TRANSFER_STATUS_MIGRATED, + ) + ) + await client.get_aspect_type(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = catalog.GetAspectTypeRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_create_entry_group_empty_call_grpc_asyncio(): + client = CatalogServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. 
+ with mock.patch.object( + type(client.transport.create_entry_group), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + await client.create_entry_group(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = catalog.CreateEntryGroupRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_update_entry_group_empty_call_grpc_asyncio(): + client = CatalogServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.update_entry_group), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + await client.update_entry_group(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = catalog.UpdateEntryGroupRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_delete_entry_group_empty_call_grpc_asyncio(): + client = CatalogServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.delete_entry_group), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + await client.delete_entry_group(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = catalog.DeleteEntryGroupRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_list_entry_groups_empty_call_grpc_asyncio(): + client = CatalogServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_entry_groups), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + catalog.ListEntryGroupsResponse( + next_page_token="next_page_token_value", + unreachable_locations=["unreachable_locations_value"], + ) + ) + await client.list_entry_groups(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = catalog.ListEntryGroupsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_get_entry_group_empty_call_grpc_asyncio(): + client = CatalogServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.get_entry_group), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + catalog.EntryGroup( + name="name_value", + uid="uid_value", + description="description_value", + display_name="display_name_value", + etag="etag_value", + transfer_status=catalog.TransferStatus.TRANSFER_STATUS_MIGRATED, + ) + ) + await client.get_entry_group(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = catalog.GetEntryGroupRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_create_entry_empty_call_grpc_asyncio(): + client = CatalogServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.create_entry), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + catalog.Entry( + name="name_value", + entry_type="entry_type_value", + parent_entry="parent_entry_value", + fully_qualified_name="fully_qualified_name_value", + ) + ) + await client.create_entry(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = catalog.CreateEntryRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_update_entry_empty_call_grpc_asyncio(): + client = CatalogServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. 
+ with mock.patch.object(type(client.transport.update_entry), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + catalog.Entry( + name="name_value", + entry_type="entry_type_value", + parent_entry="parent_entry_value", + fully_qualified_name="fully_qualified_name_value", + ) + ) + await client.update_entry(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = catalog.UpdateEntryRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_delete_entry_empty_call_grpc_asyncio(): + client = CatalogServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.delete_entry), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + catalog.Entry( + name="name_value", + entry_type="entry_type_value", + parent_entry="parent_entry_value", + fully_qualified_name="fully_qualified_name_value", + ) + ) + await client.delete_entry(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = catalog.DeleteEntryRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_list_entries_empty_call_grpc_asyncio(): + client = CatalogServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. 
+ with mock.patch.object(type(client.transport.list_entries), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + catalog.ListEntriesResponse( + next_page_token="next_page_token_value", + ) + ) + await client.list_entries(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = catalog.ListEntriesRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_get_entry_empty_call_grpc_asyncio(): + client = CatalogServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.get_entry), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + catalog.Entry( + name="name_value", + entry_type="entry_type_value", + parent_entry="parent_entry_value", + fully_qualified_name="fully_qualified_name_value", + ) + ) + await client.get_entry(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = catalog.GetEntryRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_lookup_entry_empty_call_grpc_asyncio(): + client = CatalogServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. 
+ with mock.patch.object(type(client.transport.lookup_entry), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + catalog.Entry( + name="name_value", + entry_type="entry_type_value", + parent_entry="parent_entry_value", + fully_qualified_name="fully_qualified_name_value", + ) + ) + await client.lookup_entry(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = catalog.LookupEntryRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_search_entries_empty_call_grpc_asyncio(): + client = CatalogServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.search_entries), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + catalog.SearchEntriesResponse( + total_size=1086, + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + ) + await client.search_entries(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = catalog.SearchEntriesRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_create_metadata_job_empty_call_grpc_asyncio(): + client = CatalogServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. 
+ with mock.patch.object( + type(client.transport.create_metadata_job), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + await client.create_metadata_job(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = catalog.CreateMetadataJobRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_get_metadata_job_empty_call_grpc_asyncio(): + client = CatalogServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.get_metadata_job), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + catalog.MetadataJob( + name="name_value", + uid="uid_value", + type_=catalog.MetadataJob.Type.IMPORT, + ) + ) + await client.get_metadata_job(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = catalog.GetMetadataJobRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_list_metadata_jobs_empty_call_grpc_asyncio(): + client = CatalogServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. 
+ with mock.patch.object( + type(client.transport.list_metadata_jobs), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + catalog.ListMetadataJobsResponse( + next_page_token="next_page_token_value", + unreachable_locations=["unreachable_locations_value"], + ) + ) + await client.list_metadata_jobs(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = catalog.ListMetadataJobsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_cancel_metadata_job_empty_call_grpc_asyncio(): + client = CatalogServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.cancel_metadata_job), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.cancel_metadata_job(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = catalog.CancelMetadataJobRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_create_entry_link_empty_call_grpc_asyncio(): + client = CatalogServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. 
+ with mock.patch.object( + type(client.transport.create_entry_link), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + catalog.EntryLink( + name="name_value", + entry_link_type="entry_link_type_value", + ) + ) + await client.create_entry_link(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = catalog.CreateEntryLinkRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_delete_entry_link_empty_call_grpc_asyncio(): + client = CatalogServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.delete_entry_link), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + catalog.EntryLink( + name="name_value", + entry_link_type="entry_link_type_value", + ) + ) + await client.delete_entry_link(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = catalog.DeleteEntryLinkRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_get_entry_link_empty_call_grpc_asyncio(): + client = CatalogServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. 
+ with mock.patch.object(type(client.transport.get_entry_link), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + catalog.EntryLink( + name="name_value", + entry_link_type="entry_link_type_value", + ) + ) + await client.get_entry_link(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = catalog.GetEntryLinkRequest() + + assert args[0] == request_msg + + +def test_transport_kind_rest(): + transport = CatalogServiceClient.get_transport_class("rest")( + credentials=ga_credentials.AnonymousCredentials() + ) + assert transport.kind == "rest" + + +def test_create_entry_type_rest_bad_request( + request_type=catalog.CreateEntryTypeRequest, +): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.create_entry_type(request) + + +@pytest.mark.parametrize( + "request_type", + [ + catalog.CreateEntryTypeRequest, + dict, + ], +) +def test_create_entry_type_rest_call_success(request_type): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request_init["entry_type"] = { + "name": "name_value", + "uid": "uid_value", + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "description": "description_value", + "display_name": "display_name_value", + "labels": {}, + "etag": "etag_value", + "type_aliases": ["type_aliases_value1", "type_aliases_value2"], + "platform": "platform_value", + "system": "system_value", + "required_aspects": [{"type_": "type__value"}], + "authorization": {"alternate_use_permission": "alternate_use_permission_value"}, + } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = catalog.CreateEntryTypeRequest.meta.fields["entry_type"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["entry_type"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["entry_type"][field])): + del request_init["entry_type"][field][i][subfield] + else: + del 
request_init["entry_type"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.create_entry_type(request) + + # Establish that the response is the type that we expect. + json_return_value = json_format.MessageToJson(return_value) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_create_entry_type_rest_interceptors(null_interceptor): + transport = transports.CatalogServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.CatalogServiceRestInterceptor(), + ) + client = CatalogServiceClient(transport=transport) + + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.CatalogServiceRestInterceptor, "post_create_entry_type" + ) as post, mock.patch.object( + transports.CatalogServiceRestInterceptor, "post_create_entry_type_with_metadata" + ) as post_with_metadata, mock.patch.object( + transports.CatalogServiceRestInterceptor, "pre_create_entry_type" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = 
catalog.CreateEntryTypeRequest.pb(catalog.CreateEntryTypeRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = json_format.MessageToJson(operations_pb2.Operation()) + req.return_value.content = return_value + + request = catalog.CreateEntryTypeRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata + + client.create_entry_type( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_update_entry_type_rest_bad_request( + request_type=catalog.UpdateEntryTypeRequest, +): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = { + "entry_type": {"name": "projects/sample1/locations/sample2/entryTypes/sample3"} + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.update_entry_type(request) + + +@pytest.mark.parametrize( + "request_type", + [ + catalog.UpdateEntryTypeRequest, + dict, + ], +) +def test_update_entry_type_rest_call_success(request_type): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = { + "entry_type": {"name": "projects/sample1/locations/sample2/entryTypes/sample3"} + } + request_init["entry_type"] = { + "name": "projects/sample1/locations/sample2/entryTypes/sample3", + "uid": "uid_value", + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "description": "description_value", + "display_name": "display_name_value", + "labels": {}, + "etag": "etag_value", + "type_aliases": ["type_aliases_value1", "type_aliases_value2"], + "platform": "platform_value", + "system": "system_value", + "required_aspects": [{"type_": "type__value"}], + "authorization": {"alternate_use_permission": "alternate_use_permission_value"}, + } + # The version of a generated dependency at test runtime may differ from the version used during generation. 
+ # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = catalog.UpdateEntryTypeRequest.meta.fields["entry_type"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["entry_type"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # 
Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["entry_type"][field])): + del request_init["entry_type"][field][i][subfield] + else: + del request_init["entry_type"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.update_entry_type(request) + + # Establish that the response is the type that we expect. 
+ json_return_value = json_format.MessageToJson(return_value) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_update_entry_type_rest_interceptors(null_interceptor): + transport = transports.CatalogServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.CatalogServiceRestInterceptor(), + ) + client = CatalogServiceClient(transport=transport) + + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.CatalogServiceRestInterceptor, "post_update_entry_type" + ) as post, mock.patch.object( + transports.CatalogServiceRestInterceptor, "post_update_entry_type_with_metadata" + ) as post_with_metadata, mock.patch.object( + transports.CatalogServiceRestInterceptor, "pre_update_entry_type" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = catalog.UpdateEntryTypeRequest.pb(catalog.UpdateEntryTypeRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = json_format.MessageToJson(operations_pb2.Operation()) + req.return_value.content = return_value + + request = catalog.UpdateEntryTypeRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata + + client.update_entry_type( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + 
post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_delete_entry_type_rest_bad_request( + request_type=catalog.DeleteEntryTypeRequest, +): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/entryTypes/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.delete_entry_type(request) + + +@pytest.mark.parametrize( + "request_type", + [ + catalog.DeleteEntryTypeRequest, + dict, + ], +) +def test_delete_entry_type_rest_call_success(request_type): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/entryTypes/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.delete_entry_type(request) + + # Establish that the response is the type that we expect. + json_return_value = json_format.MessageToJson(return_value) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_entry_type_rest_interceptors(null_interceptor): + transport = transports.CatalogServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.CatalogServiceRestInterceptor(), + ) + client = CatalogServiceClient(transport=transport) + + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.CatalogServiceRestInterceptor, "post_delete_entry_type" + ) as post, mock.patch.object( + transports.CatalogServiceRestInterceptor, "post_delete_entry_type_with_metadata" + ) as post_with_metadata, mock.patch.object( + transports.CatalogServiceRestInterceptor, "pre_delete_entry_type" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = catalog.DeleteEntryTypeRequest.pb(catalog.DeleteEntryTypeRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = 
json_format.MessageToJson(operations_pb2.Operation()) + req.return_value.content = return_value + + request = catalog.DeleteEntryTypeRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata + + client.delete_entry_type( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_list_entry_types_rest_bad_request(request_type=catalog.ListEntryTypesRequest): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.list_entry_types(request) + + +@pytest.mark.parametrize( + "request_type", + [ + catalog.ListEntryTypesRequest, + dict, + ], +) +def test_list_entry_types_rest_call_success(request_type): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = catalog.ListEntryTypesResponse( + next_page_token="next_page_token_value", + unreachable_locations=["unreachable_locations_value"], + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = catalog.ListEntryTypesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.list_entry_types(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListEntryTypesPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable_locations == ["unreachable_locations_value"] + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_entry_types_rest_interceptors(null_interceptor): + transport = transports.CatalogServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.CatalogServiceRestInterceptor(), + ) + client = CatalogServiceClient(transport=transport) + + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.CatalogServiceRestInterceptor, "post_list_entry_types" + ) as post, mock.patch.object( + transports.CatalogServiceRestInterceptor, "post_list_entry_types_with_metadata" + ) as post_with_metadata, mock.patch.object( + transports.CatalogServiceRestInterceptor, "pre_list_entry_types" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + 
post_with_metadata.assert_not_called() + pb_message = catalog.ListEntryTypesRequest.pb(catalog.ListEntryTypesRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = catalog.ListEntryTypesResponse.to_json( + catalog.ListEntryTypesResponse() + ) + req.return_value.content = return_value + + request = catalog.ListEntryTypesRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = catalog.ListEntryTypesResponse() + post_with_metadata.return_value = catalog.ListEntryTypesResponse(), metadata + + client.list_entry_types( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_get_entry_type_rest_bad_request(request_type=catalog.GetEntryTypeRequest): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/entryTypes/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.get_entry_type(request) + + +@pytest.mark.parametrize( + "request_type", + [ + catalog.GetEntryTypeRequest, + dict, + ], +) +def test_get_entry_type_rest_call_success(request_type): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/entryTypes/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = catalog.EntryType( + name="name_value", + uid="uid_value", + description="description_value", + display_name="display_name_value", + etag="etag_value", + type_aliases=["type_aliases_value"], + platform="platform_value", + system="system_value", + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = catalog.EntryType.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.get_entry_type(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, catalog.EntryType) + assert response.name == "name_value" + assert response.uid == "uid_value" + assert response.description == "description_value" + assert response.display_name == "display_name_value" + assert response.etag == "etag_value" + assert response.type_aliases == ["type_aliases_value"] + assert response.platform == "platform_value" + assert response.system == "system_value" + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_entry_type_rest_interceptors(null_interceptor): + transport = transports.CatalogServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.CatalogServiceRestInterceptor(), + ) + client = CatalogServiceClient(transport=transport) + + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.CatalogServiceRestInterceptor, "post_get_entry_type" + ) as post, mock.patch.object( + transports.CatalogServiceRestInterceptor, "post_get_entry_type_with_metadata" + ) as post_with_metadata, mock.patch.object( + transports.CatalogServiceRestInterceptor, "pre_get_entry_type" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = catalog.GetEntryTypeRequest.pb(catalog.GetEntryTypeRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = catalog.EntryType.to_json(catalog.EntryType()) + req.return_value.content = return_value + + request = catalog.GetEntryTypeRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = 
catalog.EntryType() + post_with_metadata.return_value = catalog.EntryType(), metadata + + client.get_entry_type( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() -def test_create_entry_type_rest_bad_request( - request_type=catalog.CreateEntryTypeRequest, +def test_create_aspect_type_rest_bad_request( + request_type=catalog.CreateAspectTypeRequest, ): client = CatalogServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" @@ -18119,24 +20663,24 @@ def test_create_entry_type_rest_bad_request( response_value.request = mock.Mock() req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.create_entry_type(request) + client.create_aspect_type(request) @pytest.mark.parametrize( "request_type", [ - catalog.CreateEntryTypeRequest, + catalog.CreateAspectTypeRequest, dict, ], ) -def test_create_entry_type_rest_call_success(request_type): +def test_create_aspect_type_rest_call_success(request_type): client = CatalogServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding request_init = {"parent": "projects/sample1/locations/sample2"} - request_init["entry_type"] = { + request_init["aspect_type"] = { "name": "name_value", "uid": "uid_value", "create_time": {"seconds": 751, "nanos": 543}, @@ -18145,18 +20689,37 @@ def test_create_entry_type_rest_call_success(request_type): "display_name": "display_name_value", "labels": {}, "etag": "etag_value", - "type_aliases": ["type_aliases_value1", "type_aliases_value2"], - "platform": "platform_value", - "system": "system_value", - "required_aspects": [{"type_": "type__value"}], "authorization": {"alternate_use_permission": "alternate_use_permission_value"}, + "metadata_template": { + "index": 536, + "name": "name_value", + "type_": "type__value", + 
"record_fields": {}, + "enum_values": [ + {"index": 536, "name": "name_value", "deprecated": "deprecated_value"} + ], + "map_items": {}, + "array_items": {}, + "type_id": "type_id_value", + "type_ref": "type_ref_value", + "constraints": {"required": True}, + "annotations": { + "deprecated": "deprecated_value", + "display_name": "display_name_value", + "description": "description_value", + "display_order": 1393, + "string_type": "string_type_value", + "string_values": ["string_values_value1", "string_values_value2"], + }, + }, + "transfer_status": 1, } # The version of a generated dependency at test runtime may differ from the version used during generation. # Delete any fields which are not present in the current runtime dependency # See https://github.com/googleapis/gapic-generator-python/issues/1748 # Determine if the message type is proto-plus or protobuf - test_field = catalog.CreateEntryTypeRequest.meta.fields["entry_type"] + test_field = catalog.CreateAspectTypeRequest.meta.fields["aspect_type"] def get_message_fields(field): # Given a field which is a message (composite type), return a list with @@ -18184,7 +20747,7 @@ def get_message_fields(field): # For each item in the sample request, create a list of sub fields which are not present at runtime # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["entry_type"].items(): # pragma: NO COVER + for field, value in request_init["aspect_type"].items(): # pragma: NO COVER result = None is_repeated = False # For repeated fields @@ -18214,10 +20777,10 @@ def get_message_fields(field): subfield = subfield_to_delete.get("subfield") if subfield: if field_repeated: - for i in range(0, len(request_init["entry_type"][field])): - del request_init["entry_type"][field][i][subfield] + for i in range(0, len(request_init["aspect_type"][field])): + del request_init["aspect_type"][field][i][subfield] else: - del 
request_init["entry_type"][field][subfield] + del request_init["aspect_type"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. @@ -18232,14 +20795,14 @@ def get_message_fields(field): response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.create_entry_type(request) + response = client.create_aspect_type(request) # Establish that the response is the type that we expect. json_return_value = json_format.MessageToJson(return_value) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_create_entry_type_rest_interceptors(null_interceptor): +def test_create_aspect_type_rest_interceptors(null_interceptor): transport = transports.CatalogServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -18255,16 +20818,19 @@ def test_create_entry_type_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( operation.Operation, "_set_result_from_operation" ), mock.patch.object( - transports.CatalogServiceRestInterceptor, "post_create_entry_type" + transports.CatalogServiceRestInterceptor, "post_create_aspect_type" ) as post, mock.patch.object( - transports.CatalogServiceRestInterceptor, "post_create_entry_type_with_metadata" + transports.CatalogServiceRestInterceptor, + "post_create_aspect_type_with_metadata", ) as post_with_metadata, mock.patch.object( - transports.CatalogServiceRestInterceptor, "pre_create_entry_type" + transports.CatalogServiceRestInterceptor, "pre_create_aspect_type" ) as pre: pre.assert_not_called() post.assert_not_called() post_with_metadata.assert_not_called() - pb_message = catalog.CreateEntryTypeRequest.pb(catalog.CreateEntryTypeRequest()) + pb_message = catalog.CreateAspectTypeRequest.pb( + catalog.CreateAspectTypeRequest() + ) transcode.return_value = { "method": "post", "uri": 
"my_uri", @@ -18278,7 +20844,7 @@ def test_create_entry_type_rest_interceptors(null_interceptor): return_value = json_format.MessageToJson(operations_pb2.Operation()) req.return_value.content = return_value - request = catalog.CreateEntryTypeRequest() + request = catalog.CreateAspectTypeRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), @@ -18287,7 +20853,7 @@ def test_create_entry_type_rest_interceptors(null_interceptor): post.return_value = operations_pb2.Operation() post_with_metadata.return_value = operations_pb2.Operation(), metadata - client.create_entry_type( + client.create_aspect_type( request, metadata=[ ("key", "val"), @@ -18300,15 +20866,17 @@ def test_create_entry_type_rest_interceptors(null_interceptor): post_with_metadata.assert_called_once() -def test_update_entry_type_rest_bad_request( - request_type=catalog.UpdateEntryTypeRequest, +def test_update_aspect_type_rest_bad_request( + request_type=catalog.UpdateAspectTypeRequest, ): client = CatalogServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding request_init = { - "entry_type": {"name": "projects/sample1/locations/sample2/entryTypes/sample3"} + "aspect_type": { + "name": "projects/sample1/locations/sample2/aspectTypes/sample3" + } } request = request_type(**request_init) @@ -18324,27 +20892,29 @@ def test_update_entry_type_rest_bad_request( response_value.request = mock.Mock() req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.update_entry_type(request) + client.update_aspect_type(request) @pytest.mark.parametrize( "request_type", [ - catalog.UpdateEntryTypeRequest, + catalog.UpdateAspectTypeRequest, dict, ], ) -def test_update_entry_type_rest_call_success(request_type): +def test_update_aspect_type_rest_call_success(request_type): client = CatalogServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a 
request that will satisfy transcoding request_init = { - "entry_type": {"name": "projects/sample1/locations/sample2/entryTypes/sample3"} + "aspect_type": { + "name": "projects/sample1/locations/sample2/aspectTypes/sample3" + } } - request_init["entry_type"] = { - "name": "projects/sample1/locations/sample2/entryTypes/sample3", + request_init["aspect_type"] = { + "name": "projects/sample1/locations/sample2/aspectTypes/sample3", "uid": "uid_value", "create_time": {"seconds": 751, "nanos": 543}, "update_time": {}, @@ -18352,18 +20922,37 @@ def test_update_entry_type_rest_call_success(request_type): "display_name": "display_name_value", "labels": {}, "etag": "etag_value", - "type_aliases": ["type_aliases_value1", "type_aliases_value2"], - "platform": "platform_value", - "system": "system_value", - "required_aspects": [{"type_": "type__value"}], "authorization": {"alternate_use_permission": "alternate_use_permission_value"}, + "metadata_template": { + "index": 536, + "name": "name_value", + "type_": "type__value", + "record_fields": {}, + "enum_values": [ + {"index": 536, "name": "name_value", "deprecated": "deprecated_value"} + ], + "map_items": {}, + "array_items": {}, + "type_id": "type_id_value", + "type_ref": "type_ref_value", + "constraints": {"required": True}, + "annotations": { + "deprecated": "deprecated_value", + "display_name": "display_name_value", + "description": "description_value", + "display_order": 1393, + "string_type": "string_type_value", + "string_values": ["string_values_value1", "string_values_value2"], + }, + }, + "transfer_status": 1, } # The version of a generated dependency at test runtime may differ from the version used during generation. 
# Delete any fields which are not present in the current runtime dependency # See https://github.com/googleapis/gapic-generator-python/issues/1748 # Determine if the message type is proto-plus or protobuf - test_field = catalog.UpdateEntryTypeRequest.meta.fields["entry_type"] + test_field = catalog.UpdateAspectTypeRequest.meta.fields["aspect_type"] def get_message_fields(field): # Given a field which is a message (composite type), return a list with @@ -18391,7 +20980,7 @@ def get_message_fields(field): # For each item in the sample request, create a list of sub fields which are not present at runtime # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["entry_type"].items(): # pragma: NO COVER + for field, value in request_init["aspect_type"].items(): # pragma: NO COVER result = None is_repeated = False # For repeated fields @@ -18421,131 +21010,10 @@ def get_message_fields(field): subfield = subfield_to_delete.get("subfield") if subfield: if field_repeated: - for i in range(0, len(request_init["entry_type"][field])): - del request_init["entry_type"][field][i][subfield] + for i in range(0, len(request_init["aspect_type"][field])): + del request_init["aspect_type"][field][i][subfield] else: - del request_init["entry_type"][field][subfield] - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. 
- return_value = operations_pb2.Operation(name="operations/spam") - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode("UTF-8") - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.update_entry_type(request) - - # Establish that the response is the type that we expect. - json_return_value = json_format.MessageToJson(return_value) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_update_entry_type_rest_interceptors(null_interceptor): - transport = transports.CatalogServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.CatalogServiceRestInterceptor(), - ) - client = CatalogServiceClient(transport=transport) - - with mock.patch.object( - type(client.transport._session), "request" - ) as req, mock.patch.object( - path_template, "transcode" - ) as transcode, mock.patch.object( - operation.Operation, "_set_result_from_operation" - ), mock.patch.object( - transports.CatalogServiceRestInterceptor, "post_update_entry_type" - ) as post, mock.patch.object( - transports.CatalogServiceRestInterceptor, "post_update_entry_type_with_metadata" - ) as post_with_metadata, mock.patch.object( - transports.CatalogServiceRestInterceptor, "pre_update_entry_type" - ) as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = catalog.UpdateEntryTypeRequest.pb(catalog.UpdateEntryTypeRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = 
json_format.MessageToJson(operations_pb2.Operation()) - req.return_value.content = return_value - - request = catalog.UpdateEntryTypeRequest() - metadata = [ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = operations_pb2.Operation() - post_with_metadata.return_value = operations_pb2.Operation(), metadata - - client.update_entry_type( - request, - metadata=[ - ("key", "val"), - ("cephalopod", "squid"), - ], - ) - - pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() - - -def test_delete_entry_type_rest_bad_request( - request_type=catalog.DeleteEntryTypeRequest, -): - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {"name": "projects/sample1/locations/sample2/entryTypes/sample3"} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = "" - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.delete_entry_type(request) - - -@pytest.mark.parametrize( - "request_type", - [ - catalog.DeleteEntryTypeRequest, - dict, - ], -) -def test_delete_entry_type_rest_call_success(request_type): - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {"name": "projects/sample1/locations/sample2/entryTypes/sample3"} + del request_init["aspect_type"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. @@ -18560,14 +21028,14 @@ def test_delete_entry_type_rest_call_success(request_type): response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.delete_entry_type(request) + response = client.update_aspect_type(request) # Establish that the response is the type that we expect. 
json_return_value = json_format.MessageToJson(return_value) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_delete_entry_type_rest_interceptors(null_interceptor): +def test_update_aspect_type_rest_interceptors(null_interceptor): transport = transports.CatalogServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -18583,16 +21051,19 @@ def test_delete_entry_type_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( operation.Operation, "_set_result_from_operation" ), mock.patch.object( - transports.CatalogServiceRestInterceptor, "post_delete_entry_type" + transports.CatalogServiceRestInterceptor, "post_update_aspect_type" ) as post, mock.patch.object( - transports.CatalogServiceRestInterceptor, "post_delete_entry_type_with_metadata" + transports.CatalogServiceRestInterceptor, + "post_update_aspect_type_with_metadata", ) as post_with_metadata, mock.patch.object( - transports.CatalogServiceRestInterceptor, "pre_delete_entry_type" + transports.CatalogServiceRestInterceptor, "pre_update_aspect_type" ) as pre: pre.assert_not_called() post.assert_not_called() post_with_metadata.assert_not_called() - pb_message = catalog.DeleteEntryTypeRequest.pb(catalog.DeleteEntryTypeRequest()) + pb_message = catalog.UpdateAspectTypeRequest.pb( + catalog.UpdateAspectTypeRequest() + ) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -18606,7 +21077,7 @@ def test_delete_entry_type_rest_interceptors(null_interceptor): return_value = json_format.MessageToJson(operations_pb2.Operation()) req.return_value.content = return_value - request = catalog.DeleteEntryTypeRequest() + request = catalog.UpdateAspectTypeRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), @@ -18615,7 +21086,7 @@ def test_delete_entry_type_rest_interceptors(null_interceptor): post.return_value = operations_pb2.Operation() post_with_metadata.return_value = operations_pb2.Operation(), metadata - client.delete_entry_type( + 
client.update_aspect_type( request, metadata=[ ("key", "val"), @@ -18628,12 +21099,14 @@ def test_delete_entry_type_rest_interceptors(null_interceptor): post_with_metadata.assert_called_once() -def test_list_entry_types_rest_bad_request(request_type=catalog.ListEntryTypesRequest): +def test_delete_aspect_type_rest_bad_request( + request_type=catalog.DeleteAspectTypeRequest, +): client = CatalogServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} + request_init = {"name": "projects/sample1/locations/sample2/aspectTypes/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -18648,53 +21121,45 @@ def test_list_entry_types_rest_bad_request(request_type=catalog.ListEntryTypesRe response_value.request = mock.Mock() req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.list_entry_types(request) + client.delete_aspect_type(request) @pytest.mark.parametrize( "request_type", [ - catalog.ListEntryTypesRequest, + catalog.DeleteAspectTypeRequest, dict, ], ) -def test_list_entry_types_rest_call_success(request_type): +def test_delete_aspect_type_rest_call_success(request_type): client = CatalogServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} + request_init = {"name": "projects/sample1/locations/sample2/aspectTypes/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = catalog.ListEntryTypesResponse( - next_page_token="next_page_token_value", - unreachable_locations=["unreachable_locations_value"], - ) + return_value = operations_pb2.Operation(name="operations/spam") # Wrap the value into a proper Response obj response_value = mock.Mock() response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = catalog.ListEntryTypesResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.list_entry_types(request) + response = client.delete_aspect_type(request) # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListEntryTypesPager) - assert response.next_page_token == "next_page_token_value" - assert response.unreachable_locations == ["unreachable_locations_value"] + json_return_value = json_format.MessageToJson(return_value) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_entry_types_rest_interceptors(null_interceptor): +def test_delete_aspect_type_rest_interceptors(null_interceptor): transport = transports.CatalogServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -18708,16 +21173,21 @@ def test_list_entry_types_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.CatalogServiceRestInterceptor, "post_list_entry_types" + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.CatalogServiceRestInterceptor, "post_delete_aspect_type" ) as post, mock.patch.object( - transports.CatalogServiceRestInterceptor, "post_list_entry_types_with_metadata" + transports.CatalogServiceRestInterceptor, + "post_delete_aspect_type_with_metadata", ) as post_with_metadata, 
mock.patch.object( - transports.CatalogServiceRestInterceptor, "pre_list_entry_types" + transports.CatalogServiceRestInterceptor, "pre_delete_aspect_type" ) as pre: pre.assert_not_called() post.assert_not_called() post_with_metadata.assert_not_called() - pb_message = catalog.ListEntryTypesRequest.pb(catalog.ListEntryTypesRequest()) + pb_message = catalog.DeleteAspectTypeRequest.pb( + catalog.DeleteAspectTypeRequest() + ) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -18728,21 +21198,19 @@ def test_list_entry_types_rest_interceptors(null_interceptor): req.return_value = mock.Mock() req.return_value.status_code = 200 req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = catalog.ListEntryTypesResponse.to_json( - catalog.ListEntryTypesResponse() - ) + return_value = json_format.MessageToJson(operations_pb2.Operation()) req.return_value.content = return_value - request = catalog.ListEntryTypesRequest() + request = catalog.DeleteAspectTypeRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = catalog.ListEntryTypesResponse() - post_with_metadata.return_value = catalog.ListEntryTypesResponse(), metadata + post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata - client.list_entry_types( + client.delete_aspect_type( request, metadata=[ ("key", "val"), @@ -18755,12 +21223,14 @@ def test_list_entry_types_rest_interceptors(null_interceptor): post_with_metadata.assert_called_once() -def test_get_entry_type_rest_bad_request(request_type=catalog.GetEntryTypeRequest): +def test_list_aspect_types_rest_bad_request( + request_type=catalog.ListAspectTypesRequest, +): client = CatalogServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {"name": "projects/sample1/locations/sample2/entryTypes/sample3"} + 
request_init = {"parent": "projects/sample1/locations/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -18775,37 +21245,31 @@ def test_get_entry_type_rest_bad_request(request_type=catalog.GetEntryTypeReques response_value.request = mock.Mock() req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.get_entry_type(request) + client.list_aspect_types(request) @pytest.mark.parametrize( "request_type", [ - catalog.GetEntryTypeRequest, + catalog.ListAspectTypesRequest, dict, ], ) -def test_get_entry_type_rest_call_success(request_type): +def test_list_aspect_types_rest_call_success(request_type): client = CatalogServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {"name": "projects/sample1/locations/sample2/entryTypes/sample3"} + request_init = {"parent": "projects/sample1/locations/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = catalog.EntryType( - name="name_value", - uid="uid_value", - description="description_value", - display_name="display_name_value", - etag="etag_value", - type_aliases=["type_aliases_value"], - platform="platform_value", - system="system_value", + return_value = catalog.ListAspectTypesResponse( + next_page_token="next_page_token_value", + unreachable_locations=["unreachable_locations_value"], ) # Wrap the value into a proper Response obj @@ -18813,27 +21277,21 @@ def test_get_entry_type_rest_call_success(request_type): response_value.status_code = 200 # Convert return value to protobuf type - return_value = catalog.EntryType.pb(return_value) + return_value = catalog.ListAspectTypesResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.get_entry_type(request) + response = client.list_aspect_types(request) # Establish that the response is the type that we expect. 
- assert isinstance(response, catalog.EntryType) - assert response.name == "name_value" - assert response.uid == "uid_value" - assert response.description == "description_value" - assert response.display_name == "display_name_value" - assert response.etag == "etag_value" - assert response.type_aliases == ["type_aliases_value"] - assert response.platform == "platform_value" - assert response.system == "system_value" + assert isinstance(response, pagers.ListAspectTypesPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable_locations == ["unreachable_locations_value"] @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_entry_type_rest_interceptors(null_interceptor): +def test_list_aspect_types_rest_interceptors(null_interceptor): transport = transports.CatalogServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -18847,16 +21305,16 @@ def test_get_entry_type_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.CatalogServiceRestInterceptor, "post_get_entry_type" + transports.CatalogServiceRestInterceptor, "post_list_aspect_types" ) as post, mock.patch.object( - transports.CatalogServiceRestInterceptor, "post_get_entry_type_with_metadata" + transports.CatalogServiceRestInterceptor, "post_list_aspect_types_with_metadata" ) as post_with_metadata, mock.patch.object( - transports.CatalogServiceRestInterceptor, "pre_get_entry_type" + transports.CatalogServiceRestInterceptor, "pre_list_aspect_types" ) as pre: pre.assert_not_called() post.assert_not_called() post_with_metadata.assert_not_called() - pb_message = catalog.GetEntryTypeRequest.pb(catalog.GetEntryTypeRequest()) + pb_message = catalog.ListAspectTypesRequest.pb(catalog.ListAspectTypesRequest()) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -18867,19 +21325,21 @@ def 
test_get_entry_type_rest_interceptors(null_interceptor): req.return_value = mock.Mock() req.return_value.status_code = 200 req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = catalog.EntryType.to_json(catalog.EntryType()) + return_value = catalog.ListAspectTypesResponse.to_json( + catalog.ListAspectTypesResponse() + ) req.return_value.content = return_value - request = catalog.GetEntryTypeRequest() + request = catalog.ListAspectTypesRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = catalog.EntryType() - post_with_metadata.return_value = catalog.EntryType(), metadata + post.return_value = catalog.ListAspectTypesResponse() + post_with_metadata.return_value = catalog.ListAspectTypesResponse(), metadata - client.get_entry_type( + client.list_aspect_types( request, metadata=[ ("key", "val"), @@ -18892,14 +21352,12 @@ def test_get_entry_type_rest_interceptors(null_interceptor): post_with_metadata.assert_called_once() -def test_create_aspect_type_rest_bad_request( - request_type=catalog.CreateAspectTypeRequest, -): +def test_get_aspect_type_rest_bad_request(request_type=catalog.GetAspectTypeRequest): client = CatalogServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} + request_init = {"name": "projects/sample1/locations/sample2/aspectTypes/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
@@ -18914,146 +21372,61 @@ def test_create_aspect_type_rest_bad_request( response_value.request = mock.Mock() req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.create_aspect_type(request) + client.get_aspect_type(request) @pytest.mark.parametrize( "request_type", [ - catalog.CreateAspectTypeRequest, + catalog.GetAspectTypeRequest, dict, ], ) -def test_create_aspect_type_rest_call_success(request_type): +def test_get_aspect_type_rest_call_success(request_type): client = CatalogServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} - request_init["aspect_type"] = { - "name": "name_value", - "uid": "uid_value", - "create_time": {"seconds": 751, "nanos": 543}, - "update_time": {}, - "description": "description_value", - "display_name": "display_name_value", - "labels": {}, - "etag": "etag_value", - "authorization": {"alternate_use_permission": "alternate_use_permission_value"}, - "metadata_template": { - "index": 536, - "name": "name_value", - "type_": "type__value", - "record_fields": {}, - "enum_values": [ - {"index": 536, "name": "name_value", "deprecated": "deprecated_value"} - ], - "map_items": {}, - "array_items": {}, - "type_id": "type_id_value", - "type_ref": "type_ref_value", - "constraints": {"required": True}, - "annotations": { - "deprecated": "deprecated_value", - "display_name": "display_name_value", - "description": "description_value", - "display_order": 1393, - "string_type": "string_type_value", - "string_values": ["string_values_value1", "string_values_value2"], - }, - }, - "transfer_status": 1, - } - # The version of a generated dependency at test runtime may differ from the version used during generation. 
- # Delete any fields which are not present in the current runtime dependency - # See https://github.com/googleapis/gapic-generator-python/issues/1748 - - # Determine if the message type is proto-plus or protobuf - test_field = catalog.CreateAspectTypeRequest.meta.fields["aspect_type"] - - def get_message_fields(field): - # Given a field which is a message (composite type), return a list with - # all the fields of the message. - # If the field is not a composite type, return an empty list. - message_fields = [] - - if hasattr(field, "message") and field.message: - is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") - - if is_field_type_proto_plus_type: - message_fields = field.message.meta.fields.values() - # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER - message_fields = field.message.DESCRIPTOR.fields - return message_fields - - runtime_nested_fields = [ - (field.name, nested_field.name) - for field in get_message_fields(test_field) - for nested_field in get_message_fields(field) - ] - - subfields_not_in_runtime = [] - - # For each item in the sample request, create a list of sub fields which are not present at runtime - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["aspect_type"].items(): # pragma: NO COVER - result = None - is_repeated = False - # For repeated fields - if isinstance(value, list) and len(value): - is_repeated = True - result = value[0] - # For fields where the type is another message - if isinstance(value, dict): - result = value - - if result and hasattr(result, "keys"): - for subfield in result.keys(): - if (field, subfield) not in runtime_nested_fields: - subfields_not_in_runtime.append( - { - "field": field, - "subfield": subfield, - "is_repeated": is_repeated, - } - ) - - # Remove fields from the sample request which are not present in the runtime version of the dependency - 
# Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER - field = subfield_to_delete.get("field") - field_repeated = subfield_to_delete.get("is_repeated") - subfield = subfield_to_delete.get("subfield") - if subfield: - if field_repeated: - for i in range(0, len(request_init["aspect_type"][field])): - del request_init["aspect_type"][field][i][subfield] - else: - del request_init["aspect_type"][field][subfield] + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/aspectTypes/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") + return_value = catalog.AspectType( + name="name_value", + uid="uid_value", + description="description_value", + display_name="display_name_value", + etag="etag_value", + transfer_status=catalog.TransferStatus.TRANSFER_STATUS_MIGRATED, + ) # Wrap the value into a proper Response obj response_value = mock.Mock() response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = catalog.AspectType.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.create_aspect_type(request) + response = client.get_aspect_type(request) # Establish that the response is the type that we expect. 
- json_return_value = json_format.MessageToJson(return_value) + assert isinstance(response, catalog.AspectType) + assert response.name == "name_value" + assert response.uid == "uid_value" + assert response.description == "description_value" + assert response.display_name == "display_name_value" + assert response.etag == "etag_value" + assert response.transfer_status == catalog.TransferStatus.TRANSFER_STATUS_MIGRATED @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_create_aspect_type_rest_interceptors(null_interceptor): +def test_get_aspect_type_rest_interceptors(null_interceptor): transport = transports.CatalogServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -19067,21 +21440,16 @@ def test_create_aspect_type_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - operation.Operation, "_set_result_from_operation" - ), mock.patch.object( - transports.CatalogServiceRestInterceptor, "post_create_aspect_type" + transports.CatalogServiceRestInterceptor, "post_get_aspect_type" ) as post, mock.patch.object( - transports.CatalogServiceRestInterceptor, - "post_create_aspect_type_with_metadata", + transports.CatalogServiceRestInterceptor, "post_get_aspect_type_with_metadata" ) as post_with_metadata, mock.patch.object( - transports.CatalogServiceRestInterceptor, "pre_create_aspect_type" + transports.CatalogServiceRestInterceptor, "pre_get_aspect_type" ) as pre: pre.assert_not_called() post.assert_not_called() post_with_metadata.assert_not_called() - pb_message = catalog.CreateAspectTypeRequest.pb( - catalog.CreateAspectTypeRequest() - ) + pb_message = catalog.GetAspectTypeRequest.pb(catalog.GetAspectTypeRequest()) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -19092,19 +21460,19 @@ def test_create_aspect_type_rest_interceptors(null_interceptor): req.return_value = mock.Mock() req.return_value.status_code = 200 
req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = json_format.MessageToJson(operations_pb2.Operation()) + return_value = catalog.AspectType.to_json(catalog.AspectType()) req.return_value.content = return_value - request = catalog.CreateAspectTypeRequest() + request = catalog.GetAspectTypeRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = operations_pb2.Operation() - post_with_metadata.return_value = operations_pb2.Operation(), metadata + post.return_value = catalog.AspectType() + post_with_metadata.return_value = catalog.AspectType(), metadata - client.create_aspect_type( + client.get_aspect_type( request, metadata=[ ("key", "val"), @@ -19117,18 +21485,14 @@ def test_create_aspect_type_rest_interceptors(null_interceptor): post_with_metadata.assert_called_once() -def test_update_aspect_type_rest_bad_request( - request_type=catalog.UpdateAspectTypeRequest, +def test_create_entry_group_rest_bad_request( + request_type=catalog.CreateEntryGroupRequest, ): client = CatalogServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = { - "aspect_type": { - "name": "projects/sample1/locations/sample2/aspectTypes/sample3" - } - } + request_init = {"parent": "projects/sample1/locations/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
@@ -19143,29 +21507,25 @@ def test_update_aspect_type_rest_bad_request( response_value.request = mock.Mock() req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.update_aspect_type(request) + client.create_entry_group(request) @pytest.mark.parametrize( "request_type", [ - catalog.UpdateAspectTypeRequest, + catalog.CreateEntryGroupRequest, dict, ], ) -def test_update_aspect_type_rest_call_success(request_type): +def test_create_entry_group_rest_call_success(request_type): client = CatalogServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = { - "aspect_type": { - "name": "projects/sample1/locations/sample2/aspectTypes/sample3" - } - } - request_init["aspect_type"] = { - "name": "projects/sample1/locations/sample2/aspectTypes/sample3", + request_init = {"parent": "projects/sample1/locations/sample2"} + request_init["entry_group"] = { + "name": "name_value", "uid": "uid_value", "create_time": {"seconds": 751, "nanos": 543}, "update_time": {}, @@ -19173,29 +21533,6 @@ def test_update_aspect_type_rest_call_success(request_type): "display_name": "display_name_value", "labels": {}, "etag": "etag_value", - "authorization": {"alternate_use_permission": "alternate_use_permission_value"}, - "metadata_template": { - "index": 536, - "name": "name_value", - "type_": "type__value", - "record_fields": {}, - "enum_values": [ - {"index": 536, "name": "name_value", "deprecated": "deprecated_value"} - ], - "map_items": {}, - "array_items": {}, - "type_id": "type_id_value", - "type_ref": "type_ref_value", - "constraints": {"required": True}, - "annotations": { - "deprecated": "deprecated_value", - "display_name": "display_name_value", - "description": "description_value", - "display_order": 1393, - "string_type": "string_type_value", - "string_values": ["string_values_value1", "string_values_value2"], - }, - }, "transfer_status": 
1, } # The version of a generated dependency at test runtime may differ from the version used during generation. @@ -19203,7 +21540,7 @@ def test_update_aspect_type_rest_call_success(request_type): # See https://github.com/googleapis/gapic-generator-python/issues/1748 # Determine if the message type is proto-plus or protobuf - test_field = catalog.UpdateAspectTypeRequest.meta.fields["aspect_type"] + test_field = catalog.CreateEntryGroupRequest.meta.fields["entry_group"] def get_message_fields(field): # Given a field which is a message (composite type), return a list with @@ -19231,7 +21568,7 @@ def get_message_fields(field): # For each item in the sample request, create a list of sub fields which are not present at runtime # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["aspect_type"].items(): # pragma: NO COVER + for field, value in request_init["entry_group"].items(): # pragma: NO COVER result = None is_repeated = False # For repeated fields @@ -19261,10 +21598,10 @@ def get_message_fields(field): subfield = subfield_to_delete.get("subfield") if subfield: if field_repeated: - for i in range(0, len(request_init["aspect_type"][field])): - del request_init["aspect_type"][field][i][subfield] + for i in range(0, len(request_init["entry_group"][field])): + del request_init["entry_group"][field][i][subfield] else: - del request_init["aspect_type"][field][subfield] + del request_init["entry_group"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
@@ -19279,14 +21616,14 @@ def get_message_fields(field): response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.update_aspect_type(request) + response = client.create_entry_group(request) # Establish that the response is the type that we expect. json_return_value = json_format.MessageToJson(return_value) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_update_aspect_type_rest_interceptors(null_interceptor): +def test_create_entry_group_rest_interceptors(null_interceptor): transport = transports.CatalogServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -19302,18 +21639,18 @@ def test_update_aspect_type_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( operation.Operation, "_set_result_from_operation" ), mock.patch.object( - transports.CatalogServiceRestInterceptor, "post_update_aspect_type" + transports.CatalogServiceRestInterceptor, "post_create_entry_group" ) as post, mock.patch.object( transports.CatalogServiceRestInterceptor, - "post_update_aspect_type_with_metadata", + "post_create_entry_group_with_metadata", ) as post_with_metadata, mock.patch.object( - transports.CatalogServiceRestInterceptor, "pre_update_aspect_type" + transports.CatalogServiceRestInterceptor, "pre_create_entry_group" ) as pre: pre.assert_not_called() post.assert_not_called() post_with_metadata.assert_not_called() - pb_message = catalog.UpdateAspectTypeRequest.pb( - catalog.UpdateAspectTypeRequest() + pb_message = catalog.CreateEntryGroupRequest.pb( + catalog.CreateEntryGroupRequest() ) transcode.return_value = { "method": "post", @@ -19328,7 +21665,7 @@ def test_update_aspect_type_rest_interceptors(null_interceptor): return_value = json_format.MessageToJson(operations_pb2.Operation()) req.return_value.content = return_value - request = catalog.UpdateAspectTypeRequest() + 
request = catalog.CreateEntryGroupRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), @@ -19337,7 +21674,7 @@ def test_update_aspect_type_rest_interceptors(null_interceptor): post.return_value = operations_pb2.Operation() post_with_metadata.return_value = operations_pb2.Operation(), metadata - client.update_aspect_type( + client.create_entry_group( request, metadata=[ ("key", "val"), @@ -19350,14 +21687,18 @@ def test_update_aspect_type_rest_interceptors(null_interceptor): post_with_metadata.assert_called_once() -def test_delete_aspect_type_rest_bad_request( - request_type=catalog.DeleteAspectTypeRequest, +def test_update_entry_group_rest_bad_request( + request_type=catalog.UpdateEntryGroupRequest, ): client = CatalogServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {"name": "projects/sample1/locations/sample2/aspectTypes/sample3"} + request_init = { + "entry_group": { + "name": "projects/sample1/locations/sample2/entryGroups/sample3" + } + } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
@@ -19372,23 +21713,105 @@ def test_delete_aspect_type_rest_bad_request( response_value.request = mock.Mock() req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.delete_aspect_type(request) + client.update_entry_group(request) @pytest.mark.parametrize( "request_type", [ - catalog.DeleteAspectTypeRequest, + catalog.UpdateEntryGroupRequest, dict, ], ) -def test_delete_aspect_type_rest_call_success(request_type): +def test_update_entry_group_rest_call_success(request_type): client = CatalogServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {"name": "projects/sample1/locations/sample2/aspectTypes/sample3"} + request_init = { + "entry_group": { + "name": "projects/sample1/locations/sample2/entryGroups/sample3" + } + } + request_init["entry_group"] = { + "name": "projects/sample1/locations/sample2/entryGroups/sample3", + "uid": "uid_value", + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "description": "description_value", + "display_name": "display_name_value", + "labels": {}, + "etag": "etag_value", + "transfer_status": 1, + } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = catalog.UpdateEntryGroupRequest.meta.fields["entry_group"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["entry_group"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["entry_group"][field])): + del request_init["entry_group"][field][i][subfield] + else: + del 
request_init["entry_group"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. @@ -19403,14 +21826,14 @@ def test_delete_aspect_type_rest_call_success(request_type): response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.delete_aspect_type(request) + response = client.update_entry_group(request) # Establish that the response is the type that we expect. json_return_value = json_format.MessageToJson(return_value) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_delete_aspect_type_rest_interceptors(null_interceptor): +def test_update_entry_group_rest_interceptors(null_interceptor): transport = transports.CatalogServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -19426,18 +21849,18 @@ def test_delete_aspect_type_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( operation.Operation, "_set_result_from_operation" ), mock.patch.object( - transports.CatalogServiceRestInterceptor, "post_delete_aspect_type" + transports.CatalogServiceRestInterceptor, "post_update_entry_group" ) as post, mock.patch.object( transports.CatalogServiceRestInterceptor, - "post_delete_aspect_type_with_metadata", + "post_update_entry_group_with_metadata", ) as post_with_metadata, mock.patch.object( - transports.CatalogServiceRestInterceptor, "pre_delete_aspect_type" + transports.CatalogServiceRestInterceptor, "pre_update_entry_group" ) as pre: pre.assert_not_called() post.assert_not_called() post_with_metadata.assert_not_called() - pb_message = catalog.DeleteAspectTypeRequest.pb( - catalog.DeleteAspectTypeRequest() + pb_message = catalog.UpdateEntryGroupRequest.pb( + catalog.UpdateEntryGroupRequest() ) transcode.return_value = { "method": "post", @@ -19452,7 +21875,7 @@ def 
test_delete_aspect_type_rest_interceptors(null_interceptor): return_value = json_format.MessageToJson(operations_pb2.Operation()) req.return_value.content = return_value - request = catalog.DeleteAspectTypeRequest() + request = catalog.UpdateEntryGroupRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), @@ -19461,7 +21884,7 @@ def test_delete_aspect_type_rest_interceptors(null_interceptor): post.return_value = operations_pb2.Operation() post_with_metadata.return_value = operations_pb2.Operation(), metadata - client.delete_aspect_type( + client.update_entry_group( request, metadata=[ ("key", "val"), @@ -19474,14 +21897,14 @@ def test_delete_aspect_type_rest_interceptors(null_interceptor): post_with_metadata.assert_called_once() -def test_list_aspect_types_rest_bad_request( - request_type=catalog.ListAspectTypesRequest, +def test_delete_entry_group_rest_bad_request( + request_type=catalog.DeleteEntryGroupRequest, ): client = CatalogServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} + request_init = {"name": "projects/sample1/locations/sample2/entryGroups/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
@@ -19496,53 +21919,45 @@ def test_list_aspect_types_rest_bad_request( response_value.request = mock.Mock() req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.list_aspect_types(request) + client.delete_entry_group(request) @pytest.mark.parametrize( "request_type", [ - catalog.ListAspectTypesRequest, + catalog.DeleteEntryGroupRequest, dict, ], ) -def test_list_aspect_types_rest_call_success(request_type): +def test_delete_entry_group_rest_call_success(request_type): client = CatalogServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} + request_init = {"name": "projects/sample1/locations/sample2/entryGroups/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = catalog.ListAspectTypesResponse( - next_page_token="next_page_token_value", - unreachable_locations=["unreachable_locations_value"], - ) + return_value = operations_pb2.Operation(name="operations/spam") # Wrap the value into a proper Response obj response_value = mock.Mock() response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = catalog.ListAspectTypesResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.list_aspect_types(request) + response = client.delete_entry_group(request) # Establish that the response is the type that we expect. 
- assert isinstance(response, pagers.ListAspectTypesPager) - assert response.next_page_token == "next_page_token_value" - assert response.unreachable_locations == ["unreachable_locations_value"] + json_return_value = json_format.MessageToJson(return_value) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_aspect_types_rest_interceptors(null_interceptor): +def test_delete_entry_group_rest_interceptors(null_interceptor): transport = transports.CatalogServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -19556,16 +21971,21 @@ def test_list_aspect_types_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.CatalogServiceRestInterceptor, "post_list_aspect_types" + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.CatalogServiceRestInterceptor, "post_delete_entry_group" ) as post, mock.patch.object( - transports.CatalogServiceRestInterceptor, "post_list_aspect_types_with_metadata" + transports.CatalogServiceRestInterceptor, + "post_delete_entry_group_with_metadata", ) as post_with_metadata, mock.patch.object( - transports.CatalogServiceRestInterceptor, "pre_list_aspect_types" + transports.CatalogServiceRestInterceptor, "pre_delete_entry_group" ) as pre: pre.assert_not_called() post.assert_not_called() post_with_metadata.assert_not_called() - pb_message = catalog.ListAspectTypesRequest.pb(catalog.ListAspectTypesRequest()) + pb_message = catalog.DeleteEntryGroupRequest.pb( + catalog.DeleteEntryGroupRequest() + ) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -19576,21 +21996,19 @@ def test_list_aspect_types_rest_interceptors(null_interceptor): req.return_value = mock.Mock() req.return_value.status_code = 200 req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = catalog.ListAspectTypesResponse.to_json( - 
catalog.ListAspectTypesResponse() - ) + return_value = json_format.MessageToJson(operations_pb2.Operation()) req.return_value.content = return_value - request = catalog.ListAspectTypesRequest() + request = catalog.DeleteEntryGroupRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = catalog.ListAspectTypesResponse() - post_with_metadata.return_value = catalog.ListAspectTypesResponse(), metadata + post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata - client.list_aspect_types( + client.delete_entry_group( request, metadata=[ ("key", "val"), @@ -19603,12 +22021,14 @@ def test_list_aspect_types_rest_interceptors(null_interceptor): post_with_metadata.assert_called_once() -def test_get_aspect_type_rest_bad_request(request_type=catalog.GetAspectTypeRequest): +def test_list_entry_groups_rest_bad_request( + request_type=catalog.ListEntryGroupsRequest, +): client = CatalogServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {"name": "projects/sample1/locations/sample2/aspectTypes/sample3"} + request_init = {"parent": "projects/sample1/locations/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
@@ -19623,35 +22043,31 @@ def test_get_aspect_type_rest_bad_request(request_type=catalog.GetAspectTypeRequ response_value.request = mock.Mock() req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.get_aspect_type(request) + client.list_entry_groups(request) @pytest.mark.parametrize( "request_type", [ - catalog.GetAspectTypeRequest, + catalog.ListEntryGroupsRequest, dict, ], ) -def test_get_aspect_type_rest_call_success(request_type): +def test_list_entry_groups_rest_call_success(request_type): client = CatalogServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {"name": "projects/sample1/locations/sample2/aspectTypes/sample3"} + request_init = {"parent": "projects/sample1/locations/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = catalog.AspectType( - name="name_value", - uid="uid_value", - description="description_value", - display_name="display_name_value", - etag="etag_value", - transfer_status=catalog.TransferStatus.TRANSFER_STATUS_MIGRATED, + return_value = catalog.ListEntryGroupsResponse( + next_page_token="next_page_token_value", + unreachable_locations=["unreachable_locations_value"], ) # Wrap the value into a proper Response obj @@ -19659,25 +22075,21 @@ def test_get_aspect_type_rest_call_success(request_type): response_value.status_code = 200 # Convert return value to protobuf type - return_value = catalog.AspectType.pb(return_value) + return_value = catalog.ListEntryGroupsResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.get_aspect_type(request) + response = client.list_entry_groups(request) # Establish that the response is the type that we expect. 
- assert isinstance(response, catalog.AspectType) - assert response.name == "name_value" - assert response.uid == "uid_value" - assert response.description == "description_value" - assert response.display_name == "display_name_value" - assert response.etag == "etag_value" - assert response.transfer_status == catalog.TransferStatus.TRANSFER_STATUS_MIGRATED + assert isinstance(response, pagers.ListEntryGroupsPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable_locations == ["unreachable_locations_value"] @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_aspect_type_rest_interceptors(null_interceptor): +def test_list_entry_groups_rest_interceptors(null_interceptor): transport = transports.CatalogServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -19691,16 +22103,16 @@ def test_get_aspect_type_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.CatalogServiceRestInterceptor, "post_get_aspect_type" + transports.CatalogServiceRestInterceptor, "post_list_entry_groups" ) as post, mock.patch.object( - transports.CatalogServiceRestInterceptor, "post_get_aspect_type_with_metadata" + transports.CatalogServiceRestInterceptor, "post_list_entry_groups_with_metadata" ) as post_with_metadata, mock.patch.object( - transports.CatalogServiceRestInterceptor, "pre_get_aspect_type" + transports.CatalogServiceRestInterceptor, "pre_list_entry_groups" ) as pre: pre.assert_not_called() post.assert_not_called() post_with_metadata.assert_not_called() - pb_message = catalog.GetAspectTypeRequest.pb(catalog.GetAspectTypeRequest()) + pb_message = catalog.ListEntryGroupsRequest.pb(catalog.ListEntryGroupsRequest()) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -19711,19 +22123,21 @@ def test_get_aspect_type_rest_interceptors(null_interceptor): req.return_value = mock.Mock() 
req.return_value.status_code = 200 req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = catalog.AspectType.to_json(catalog.AspectType()) + return_value = catalog.ListEntryGroupsResponse.to_json( + catalog.ListEntryGroupsResponse() + ) req.return_value.content = return_value - request = catalog.GetAspectTypeRequest() + request = catalog.ListEntryGroupsRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = catalog.AspectType() - post_with_metadata.return_value = catalog.AspectType(), metadata + post.return_value = catalog.ListEntryGroupsResponse() + post_with_metadata.return_value = catalog.ListEntryGroupsResponse(), metadata - client.get_aspect_type( + client.list_entry_groups( request, metadata=[ ("key", "val"), @@ -19736,14 +22150,12 @@ def test_get_aspect_type_rest_interceptors(null_interceptor): post_with_metadata.assert_called_once() -def test_create_entry_group_rest_bad_request( - request_type=catalog.CreateEntryGroupRequest, -): +def test_get_entry_group_rest_bad_request(request_type=catalog.GetEntryGroupRequest): client = CatalogServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} + request_init = {"name": "projects/sample1/locations/sample2/entryGroups/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
@@ -19758,123 +22170,61 @@ def test_create_entry_group_rest_bad_request( response_value.request = mock.Mock() req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.create_entry_group(request) + client.get_entry_group(request) @pytest.mark.parametrize( "request_type", [ - catalog.CreateEntryGroupRequest, + catalog.GetEntryGroupRequest, dict, ], ) -def test_create_entry_group_rest_call_success(request_type): +def test_get_entry_group_rest_call_success(request_type): client = CatalogServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} - request_init["entry_group"] = { - "name": "name_value", - "uid": "uid_value", - "create_time": {"seconds": 751, "nanos": 543}, - "update_time": {}, - "description": "description_value", - "display_name": "display_name_value", - "labels": {}, - "etag": "etag_value", - "transfer_status": 1, - } - # The version of a generated dependency at test runtime may differ from the version used during generation. - # Delete any fields which are not present in the current runtime dependency - # See https://github.com/googleapis/gapic-generator-python/issues/1748 - - # Determine if the message type is proto-plus or protobuf - test_field = catalog.CreateEntryGroupRequest.meta.fields["entry_group"] - - def get_message_fields(field): - # Given a field which is a message (composite type), return a list with - # all the fields of the message. - # If the field is not a composite type, return an empty list. 
- message_fields = [] - - if hasattr(field, "message") and field.message: - is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") - - if is_field_type_proto_plus_type: - message_fields = field.message.meta.fields.values() - # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER - message_fields = field.message.DESCRIPTOR.fields - return message_fields - - runtime_nested_fields = [ - (field.name, nested_field.name) - for field in get_message_fields(test_field) - for nested_field in get_message_fields(field) - ] - - subfields_not_in_runtime = [] - - # For each item in the sample request, create a list of sub fields which are not present at runtime - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["entry_group"].items(): # pragma: NO COVER - result = None - is_repeated = False - # For repeated fields - if isinstance(value, list) and len(value): - is_repeated = True - result = value[0] - # For fields where the type is another message - if isinstance(value, dict): - result = value - - if result and hasattr(result, "keys"): - for subfield in result.keys(): - if (field, subfield) not in runtime_nested_fields: - subfields_not_in_runtime.append( - { - "field": field, - "subfield": subfield, - "is_repeated": is_repeated, - } - ) - - # Remove fields from the sample request which are not present in the runtime version of the dependency - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER - field = subfield_to_delete.get("field") - field_repeated = subfield_to_delete.get("is_repeated") - subfield = subfield_to_delete.get("subfield") - if subfield: - if field_repeated: - for i in range(0, len(request_init["entry_group"][field])): - del request_init["entry_group"][field][i][subfield] - else: - del 
request_init["entry_group"][field][subfield] + request_init = {"name": "projects/sample1/locations/sample2/entryGroups/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") + return_value = catalog.EntryGroup( + name="name_value", + uid="uid_value", + description="description_value", + display_name="display_name_value", + etag="etag_value", + transfer_status=catalog.TransferStatus.TRANSFER_STATUS_MIGRATED, + ) # Wrap the value into a proper Response obj response_value = mock.Mock() response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = catalog.EntryGroup.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.create_entry_group(request) + response = client.get_entry_group(request) # Establish that the response is the type that we expect. 
- json_return_value = json_format.MessageToJson(return_value) + assert isinstance(response, catalog.EntryGroup) + assert response.name == "name_value" + assert response.uid == "uid_value" + assert response.description == "description_value" + assert response.display_name == "display_name_value" + assert response.etag == "etag_value" + assert response.transfer_status == catalog.TransferStatus.TRANSFER_STATUS_MIGRATED @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_create_entry_group_rest_interceptors(null_interceptor): +def test_get_entry_group_rest_interceptors(null_interceptor): transport = transports.CatalogServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -19888,21 +22238,16 @@ def test_create_entry_group_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - operation.Operation, "_set_result_from_operation" - ), mock.patch.object( - transports.CatalogServiceRestInterceptor, "post_create_entry_group" + transports.CatalogServiceRestInterceptor, "post_get_entry_group" ) as post, mock.patch.object( - transports.CatalogServiceRestInterceptor, - "post_create_entry_group_with_metadata", + transports.CatalogServiceRestInterceptor, "post_get_entry_group_with_metadata" ) as post_with_metadata, mock.patch.object( - transports.CatalogServiceRestInterceptor, "pre_create_entry_group" + transports.CatalogServiceRestInterceptor, "pre_get_entry_group" ) as pre: pre.assert_not_called() post.assert_not_called() post_with_metadata.assert_not_called() - pb_message = catalog.CreateEntryGroupRequest.pb( - catalog.CreateEntryGroupRequest() - ) + pb_message = catalog.GetEntryGroupRequest.pb(catalog.GetEntryGroupRequest()) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -19913,19 +22258,19 @@ def test_create_entry_group_rest_interceptors(null_interceptor): req.return_value = mock.Mock() req.return_value.status_code = 200 
req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = json_format.MessageToJson(operations_pb2.Operation()) + return_value = catalog.EntryGroup.to_json(catalog.EntryGroup()) req.return_value.content = return_value - request = catalog.CreateEntryGroupRequest() + request = catalog.GetEntryGroupRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = operations_pb2.Operation() - post_with_metadata.return_value = operations_pb2.Operation(), metadata + post.return_value = catalog.EntryGroup() + post_with_metadata.return_value = catalog.EntryGroup(), metadata - client.create_entry_group( + client.get_entry_group( request, metadata=[ ("key", "val"), @@ -19938,18 +22283,12 @@ def test_create_entry_group_rest_interceptors(null_interceptor): post_with_metadata.assert_called_once() -def test_update_entry_group_rest_bad_request( - request_type=catalog.UpdateEntryGroupRequest, -): +def test_create_entry_rest_bad_request(request_type=catalog.CreateEntryRequest): client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) - # send a request that will satisfy transcoding - request_init = { - "entry_group": { - "name": "projects/sample1/locations/sample2/entryGroups/sample3" - } - } + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2/entryGroups/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
@@ -19964,44 +22303,50 @@ def test_update_entry_group_rest_bad_request( response_value.request = mock.Mock() req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.update_entry_group(request) + client.create_entry(request) @pytest.mark.parametrize( "request_type", [ - catalog.UpdateEntryGroupRequest, + catalog.CreateEntryRequest, dict, ], ) -def test_update_entry_group_rest_call_success(request_type): +def test_create_entry_rest_call_success(request_type): client = CatalogServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = { - "entry_group": { - "name": "projects/sample1/locations/sample2/entryGroups/sample3" - } - } - request_init["entry_group"] = { - "name": "projects/sample1/locations/sample2/entryGroups/sample3", - "uid": "uid_value", + request_init = {"parent": "projects/sample1/locations/sample2/entryGroups/sample3"} + request_init["entry"] = { + "name": "name_value", + "entry_type": "entry_type_value", "create_time": {"seconds": 751, "nanos": 543}, "update_time": {}, - "description": "description_value", - "display_name": "display_name_value", - "labels": {}, - "etag": "etag_value", - "transfer_status": 1, + "aspects": {}, + "parent_entry": "parent_entry_value", + "fully_qualified_name": "fully_qualified_name_value", + "entry_source": { + "resource": "resource_value", + "system": "system_value", + "platform": "platform_value", + "display_name": "display_name_value", + "description": "description_value", + "labels": {}, + "ancestors": [{"name": "name_value", "type_": "type__value"}], + "create_time": {}, + "update_time": {}, + "location": "location_value", + }, } # The version of a generated dependency at test runtime may differ from the version used during generation. 
# Delete any fields which are not present in the current runtime dependency # See https://github.com/googleapis/gapic-generator-python/issues/1748 # Determine if the message type is proto-plus or protobuf - test_field = catalog.UpdateEntryGroupRequest.meta.fields["entry_group"] + test_field = catalog.CreateEntryRequest.meta.fields["entry"] def get_message_fields(field): # Given a field which is a message (composite type), return a list with @@ -20029,7 +22374,7 @@ def get_message_fields(field): # For each item in the sample request, create a list of sub fields which are not present at runtime # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["entry_group"].items(): # pragma: NO COVER + for field, value in request_init["entry"].items(): # pragma: NO COVER result = None is_repeated = False # For repeated fields @@ -20059,32 +22404,44 @@ def get_message_fields(field): subfield = subfield_to_delete.get("subfield") if subfield: if field_repeated: - for i in range(0, len(request_init["entry_group"][field])): - del request_init["entry_group"][field][i][subfield] + for i in range(0, len(request_init["entry"][field])): + del request_init["entry"][field][i][subfield] else: - del request_init["entry_group"][field][subfield] + del request_init["entry"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = operations_pb2.Operation(name="operations/spam") + return_value = catalog.Entry( + name="name_value", + entry_type="entry_type_value", + parent_entry="parent_entry_value", + fully_qualified_name="fully_qualified_name_value", + ) # Wrap the value into a proper Response obj response_value = mock.Mock() response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = catalog.Entry.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.update_entry_group(request) + response = client.create_entry(request) # Establish that the response is the type that we expect. - json_return_value = json_format.MessageToJson(return_value) + assert isinstance(response, catalog.Entry) + assert response.name == "name_value" + assert response.entry_type == "entry_type_value" + assert response.parent_entry == "parent_entry_value" + assert response.fully_qualified_name == "fully_qualified_name_value" @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_update_entry_group_rest_interceptors(null_interceptor): +def test_create_entry_rest_interceptors(null_interceptor): transport = transports.CatalogServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -20098,21 +22455,16 @@ def test_update_entry_group_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - operation.Operation, "_set_result_from_operation" - ), mock.patch.object( - transports.CatalogServiceRestInterceptor, "post_update_entry_group" + transports.CatalogServiceRestInterceptor, "post_create_entry" ) as post, mock.patch.object( - transports.CatalogServiceRestInterceptor, - "post_update_entry_group_with_metadata", + 
transports.CatalogServiceRestInterceptor, "post_create_entry_with_metadata" ) as post_with_metadata, mock.patch.object( - transports.CatalogServiceRestInterceptor, "pre_update_entry_group" + transports.CatalogServiceRestInterceptor, "pre_create_entry" ) as pre: pre.assert_not_called() post.assert_not_called() post_with_metadata.assert_not_called() - pb_message = catalog.UpdateEntryGroupRequest.pb( - catalog.UpdateEntryGroupRequest() - ) + pb_message = catalog.CreateEntryRequest.pb(catalog.CreateEntryRequest()) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -20123,19 +22475,19 @@ def test_update_entry_group_rest_interceptors(null_interceptor): req.return_value = mock.Mock() req.return_value.status_code = 200 req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = json_format.MessageToJson(operations_pb2.Operation()) + return_value = catalog.Entry.to_json(catalog.Entry()) req.return_value.content = return_value - request = catalog.UpdateEntryGroupRequest() + request = catalog.CreateEntryRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = operations_pb2.Operation() - post_with_metadata.return_value = operations_pb2.Operation(), metadata + post.return_value = catalog.Entry() + post_with_metadata.return_value = catalog.Entry(), metadata - client.update_entry_group( + client.create_entry( request, metadata=[ ("key", "val"), @@ -20148,14 +22500,16 @@ def test_update_entry_group_rest_interceptors(null_interceptor): post_with_metadata.assert_called_once() -def test_delete_entry_group_rest_bad_request( - request_type=catalog.DeleteEntryGroupRequest, -): +def test_update_entry_rest_bad_request(request_type=catalog.UpdateEntryRequest): client = CatalogServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {"name": 
"projects/sample1/locations/sample2/entryGroups/sample3"} + request_init = { + "entry": { + "name": "projects/sample1/locations/sample2/entryGroups/sample3/entries/sample4" + } + } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -20170,155 +22524,125 @@ def test_delete_entry_group_rest_bad_request( response_value.request = mock.Mock() req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.delete_entry_group(request) + client.update_entry(request) @pytest.mark.parametrize( "request_type", [ - catalog.DeleteEntryGroupRequest, + catalog.UpdateEntryRequest, dict, ], ) -def test_delete_entry_group_rest_call_success(request_type): +def test_update_entry_rest_call_success(request_type): client = CatalogServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {"name": "projects/sample1/locations/sample2/entryGroups/sample3"} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") - - # Wrap the value into a proper Response obj - response_value = mock.Mock() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value.content = json_return_value.encode("UTF-8") - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.delete_entry_group(request) - - # Establish that the response is the type that we expect. 
- json_return_value = json_format.MessageToJson(return_value) - - -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_delete_entry_group_rest_interceptors(null_interceptor): - transport = transports.CatalogServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.CatalogServiceRestInterceptor(), - ) - client = CatalogServiceClient(transport=transport) - - with mock.patch.object( - type(client.transport._session), "request" - ) as req, mock.patch.object( - path_template, "transcode" - ) as transcode, mock.patch.object( - operation.Operation, "_set_result_from_operation" - ), mock.patch.object( - transports.CatalogServiceRestInterceptor, "post_delete_entry_group" - ) as post, mock.patch.object( - transports.CatalogServiceRestInterceptor, - "post_delete_entry_group_with_metadata", - ) as post_with_metadata, mock.patch.object( - transports.CatalogServiceRestInterceptor, "pre_delete_entry_group" - ) as pre: - pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = catalog.DeleteEntryGroupRequest.pb( - catalog.DeleteEntryGroupRequest() - ) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, + request_init = { + "entry": { + "name": "projects/sample1/locations/sample2/entryGroups/sample3/entries/sample4" } + } + request_init["entry"] = { + "name": "projects/sample1/locations/sample2/entryGroups/sample3/entries/sample4", + "entry_type": "entry_type_value", + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "aspects": {}, + "parent_entry": "parent_entry_value", + "fully_qualified_name": "fully_qualified_name_value", + "entry_source": { + "resource": "resource_value", + "system": "system_value", + "platform": "platform_value", + "display_name": "display_name_value", + "description": "description_value", + "labels": {}, + "ancestors": [{"name": 
"name_value", "type_": "type__value"}], + "create_time": {}, + "update_time": {}, + "location": "location_value", + }, + } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 - req.return_value = mock.Mock() - req.return_value.status_code = 200 - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = json_format.MessageToJson(operations_pb2.Operation()) - req.return_value.content = return_value - - request = catalog.DeleteEntryGroupRequest() - metadata = [ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = operations_pb2.Operation() - post_with_metadata.return_value = operations_pb2.Operation(), metadata - - client.delete_entry_group( - request, - metadata=[ - ("key", "val"), - ("cephalopod", "squid"), - ], - ) - - pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() + # Determine if the message type is proto-plus or protobuf + test_field = catalog.UpdateEntryRequest.meta.fields["entry"] + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] -def test_list_entry_groups_rest_bad_request( - request_type=catalog.ListEntryGroupsRequest, -): - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} - request = request_type(**request_init) + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = "" - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.list_entry_groups(request) + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] -@pytest.mark.parametrize( - "request_type", - [ - catalog.ListEntryGroupsRequest, - dict, - ], -) -def test_list_entry_groups_rest_call_success(request_type): - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) + subfields_not_in_runtime = [] - # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} + # For each item in the sample request, create a list of sub fields which are not present at 
runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["entry"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["entry"][field])): + del request_init["entry"][field][i][subfield] + else: + del request_init["entry"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = catalog.ListEntryGroupsResponse( - next_page_token="next_page_token_value", - unreachable_locations=["unreachable_locations_value"], + return_value = catalog.Entry( + name="name_value", + entry_type="entry_type_value", + parent_entry="parent_entry_value", + fully_qualified_name="fully_qualified_name_value", ) # Wrap the value into a proper Response obj @@ -20326,21 +22650,23 @@ def test_list_entry_groups_rest_call_success(request_type): response_value.status_code = 200 # Convert return value to protobuf type - return_value = catalog.ListEntryGroupsResponse.pb(return_value) + return_value = catalog.Entry.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.list_entry_groups(request) + response = client.update_entry(request) # Establish that the response is the type that we expect. 
- assert isinstance(response, pagers.ListEntryGroupsPager) - assert response.next_page_token == "next_page_token_value" - assert response.unreachable_locations == ["unreachable_locations_value"] + assert isinstance(response, catalog.Entry) + assert response.name == "name_value" + assert response.entry_type == "entry_type_value" + assert response.parent_entry == "parent_entry_value" + assert response.fully_qualified_name == "fully_qualified_name_value" @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_entry_groups_rest_interceptors(null_interceptor): +def test_update_entry_rest_interceptors(null_interceptor): transport = transports.CatalogServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -20354,16 +22680,16 @@ def test_list_entry_groups_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.CatalogServiceRestInterceptor, "post_list_entry_groups" + transports.CatalogServiceRestInterceptor, "post_update_entry" ) as post, mock.patch.object( - transports.CatalogServiceRestInterceptor, "post_list_entry_groups_with_metadata" + transports.CatalogServiceRestInterceptor, "post_update_entry_with_metadata" ) as post_with_metadata, mock.patch.object( - transports.CatalogServiceRestInterceptor, "pre_list_entry_groups" + transports.CatalogServiceRestInterceptor, "pre_update_entry" ) as pre: pre.assert_not_called() post.assert_not_called() post_with_metadata.assert_not_called() - pb_message = catalog.ListEntryGroupsRequest.pb(catalog.ListEntryGroupsRequest()) + pb_message = catalog.UpdateEntryRequest.pb(catalog.UpdateEntryRequest()) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -20374,21 +22700,19 @@ def test_list_entry_groups_rest_interceptors(null_interceptor): req.return_value = mock.Mock() req.return_value.status_code = 200 req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - 
return_value = catalog.ListEntryGroupsResponse.to_json( - catalog.ListEntryGroupsResponse() - ) + return_value = catalog.Entry.to_json(catalog.Entry()) req.return_value.content = return_value - request = catalog.ListEntryGroupsRequest() + request = catalog.UpdateEntryRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = catalog.ListEntryGroupsResponse() - post_with_metadata.return_value = catalog.ListEntryGroupsResponse(), metadata + post.return_value = catalog.Entry() + post_with_metadata.return_value = catalog.Entry(), metadata - client.list_entry_groups( + client.update_entry( request, metadata=[ ("key", "val"), @@ -20401,12 +22725,14 @@ def test_list_entry_groups_rest_interceptors(null_interceptor): post_with_metadata.assert_called_once() -def test_get_entry_group_rest_bad_request(request_type=catalog.GetEntryGroupRequest): +def test_delete_entry_rest_bad_request(request_type=catalog.DeleteEntryRequest): client = CatalogServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {"name": "projects/sample1/locations/sample2/entryGroups/sample3"} + request_init = { + "name": "projects/sample1/locations/sample2/entryGroups/sample3/entries/sample4" + } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
@@ -20421,35 +22747,35 @@ def test_get_entry_group_rest_bad_request(request_type=catalog.GetEntryGroupRequ response_value.request = mock.Mock() req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.get_entry_group(request) + client.delete_entry(request) @pytest.mark.parametrize( "request_type", [ - catalog.GetEntryGroupRequest, + catalog.DeleteEntryRequest, dict, ], ) -def test_get_entry_group_rest_call_success(request_type): +def test_delete_entry_rest_call_success(request_type): client = CatalogServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {"name": "projects/sample1/locations/sample2/entryGroups/sample3"} + request_init = { + "name": "projects/sample1/locations/sample2/entryGroups/sample3/entries/sample4" + } request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = catalog.EntryGroup( + return_value = catalog.Entry( name="name_value", - uid="uid_value", - description="description_value", - display_name="display_name_value", - etag="etag_value", - transfer_status=catalog.TransferStatus.TRANSFER_STATUS_MIGRATED, + entry_type="entry_type_value", + parent_entry="parent_entry_value", + fully_qualified_name="fully_qualified_name_value", ) # Wrap the value into a proper Response obj @@ -20457,25 +22783,23 @@ def test_get_entry_group_rest_call_success(request_type): response_value.status_code = 200 # Convert return value to protobuf type - return_value = catalog.EntryGroup.pb(return_value) + return_value = catalog.Entry.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.get_entry_group(request) + response = client.delete_entry(request) # Establish that the response is the type that we expect. 
- assert isinstance(response, catalog.EntryGroup) + assert isinstance(response, catalog.Entry) assert response.name == "name_value" - assert response.uid == "uid_value" - assert response.description == "description_value" - assert response.display_name == "display_name_value" - assert response.etag == "etag_value" - assert response.transfer_status == catalog.TransferStatus.TRANSFER_STATUS_MIGRATED + assert response.entry_type == "entry_type_value" + assert response.parent_entry == "parent_entry_value" + assert response.fully_qualified_name == "fully_qualified_name_value" @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_entry_group_rest_interceptors(null_interceptor): +def test_delete_entry_rest_interceptors(null_interceptor): transport = transports.CatalogServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -20489,16 +22813,16 @@ def test_get_entry_group_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.CatalogServiceRestInterceptor, "post_get_entry_group" + transports.CatalogServiceRestInterceptor, "post_delete_entry" ) as post, mock.patch.object( - transports.CatalogServiceRestInterceptor, "post_get_entry_group_with_metadata" + transports.CatalogServiceRestInterceptor, "post_delete_entry_with_metadata" ) as post_with_metadata, mock.patch.object( - transports.CatalogServiceRestInterceptor, "pre_get_entry_group" + transports.CatalogServiceRestInterceptor, "pre_delete_entry" ) as pre: pre.assert_not_called() post.assert_not_called() post_with_metadata.assert_not_called() - pb_message = catalog.GetEntryGroupRequest.pb(catalog.GetEntryGroupRequest()) + pb_message = catalog.DeleteEntryRequest.pb(catalog.DeleteEntryRequest()) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -20509,19 +22833,19 @@ def test_get_entry_group_rest_interceptors(null_interceptor): req.return_value = mock.Mock() 
req.return_value.status_code = 200 req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = catalog.EntryGroup.to_json(catalog.EntryGroup()) + return_value = catalog.Entry.to_json(catalog.Entry()) req.return_value.content = return_value - request = catalog.GetEntryGroupRequest() + request = catalog.DeleteEntryRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = catalog.EntryGroup() - post_with_metadata.return_value = catalog.EntryGroup(), metadata + post.return_value = catalog.Entry() + post_with_metadata.return_value = catalog.Entry(), metadata - client.get_entry_group( + client.delete_entry( request, metadata=[ ("key", "val"), @@ -20534,7 +22858,7 @@ def test_get_entry_group_rest_interceptors(null_interceptor): post_with_metadata.assert_called_once() -def test_create_entry_rest_bad_request(request_type=catalog.CreateEntryRequest): +def test_list_entries_rest_bad_request(request_type=catalog.ListEntriesRequest): client = CatalogServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -20551,124 +22875,33 @@ def test_create_entry_rest_bad_request(request_type=catalog.CreateEntryRequest): json_return_value = "" response_value.json = mock.Mock(return_value={}) response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.create_entry(request) - - -@pytest.mark.parametrize( - "request_type", - [ - catalog.CreateEntryRequest, - dict, - ], -) -def test_create_entry_rest_call_success(request_type): - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2/entryGroups/sample3"} - request_init["entry"] = { - "name": "name_value", - "entry_type": 
"entry_type_value", - "create_time": {"seconds": 751, "nanos": 543}, - "update_time": {}, - "aspects": {}, - "parent_entry": "parent_entry_value", - "fully_qualified_name": "fully_qualified_name_value", - "entry_source": { - "resource": "resource_value", - "system": "system_value", - "platform": "platform_value", - "display_name": "display_name_value", - "description": "description_value", - "labels": {}, - "ancestors": [{"name": "name_value", "type_": "type__value"}], - "create_time": {}, - "update_time": {}, - "location": "location_value", - }, - } - # The version of a generated dependency at test runtime may differ from the version used during generation. - # Delete any fields which are not present in the current runtime dependency - # See https://github.com/googleapis/gapic-generator-python/issues/1748 - - # Determine if the message type is proto-plus or protobuf - test_field = catalog.CreateEntryRequest.meta.fields["entry"] - - def get_message_fields(field): - # Given a field which is a message (composite type), return a list with - # all the fields of the message. - # If the field is not a composite type, return an empty list. 
- message_fields = [] - - if hasattr(field, "message") and field.message: - is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") - - if is_field_type_proto_plus_type: - message_fields = field.message.meta.fields.values() - # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER - message_fields = field.message.DESCRIPTOR.fields - return message_fields - - runtime_nested_fields = [ - (field.name, nested_field.name) - for field in get_message_fields(test_field) - for nested_field in get_message_fields(field) - ] - - subfields_not_in_runtime = [] - - # For each item in the sample request, create a list of sub fields which are not present at runtime - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["entry"].items(): # pragma: NO COVER - result = None - is_repeated = False - # For repeated fields - if isinstance(value, list) and len(value): - is_repeated = True - result = value[0] - # For fields where the type is another message - if isinstance(value, dict): - result = value - - if result and hasattr(result, "keys"): - for subfield in result.keys(): - if (field, subfield) not in runtime_nested_fields: - subfields_not_in_runtime.append( - { - "field": field, - "subfield": subfield, - "is_repeated": is_repeated, - } - ) + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.list_entries(request) - # Remove fields from the sample request which are not present in the runtime version of the dependency - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER - field = subfield_to_delete.get("field") - field_repeated = subfield_to_delete.get("is_repeated") - subfield = 
subfield_to_delete.get("subfield") - if subfield: - if field_repeated: - for i in range(0, len(request_init["entry"][field])): - del request_init["entry"][field][i][subfield] - else: - del request_init["entry"][field][subfield] + +@pytest.mark.parametrize( + "request_type", + [ + catalog.ListEntriesRequest, + dict, + ], +) +def test_list_entries_rest_call_success(request_type): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2/entryGroups/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = catalog.Entry( - name="name_value", - entry_type="entry_type_value", - parent_entry="parent_entry_value", - fully_qualified_name="fully_qualified_name_value", + return_value = catalog.ListEntriesResponse( + next_page_token="next_page_token_value", ) # Wrap the value into a proper Response obj @@ -20676,23 +22909,20 @@ def get_message_fields(field): response_value.status_code = 200 # Convert return value to protobuf type - return_value = catalog.Entry.pb(return_value) + return_value = catalog.ListEntriesResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.create_entry(request) + response = client.list_entries(request) # Establish that the response is the type that we expect. 
- assert isinstance(response, catalog.Entry) - assert response.name == "name_value" - assert response.entry_type == "entry_type_value" - assert response.parent_entry == "parent_entry_value" - assert response.fully_qualified_name == "fully_qualified_name_value" + assert isinstance(response, pagers.ListEntriesPager) + assert response.next_page_token == "next_page_token_value" @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_create_entry_rest_interceptors(null_interceptor): +def test_list_entries_rest_interceptors(null_interceptor): transport = transports.CatalogServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -20706,16 +22936,16 @@ def test_create_entry_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.CatalogServiceRestInterceptor, "post_create_entry" + transports.CatalogServiceRestInterceptor, "post_list_entries" ) as post, mock.patch.object( - transports.CatalogServiceRestInterceptor, "post_create_entry_with_metadata" + transports.CatalogServiceRestInterceptor, "post_list_entries_with_metadata" ) as post_with_metadata, mock.patch.object( - transports.CatalogServiceRestInterceptor, "pre_create_entry" + transports.CatalogServiceRestInterceptor, "pre_list_entries" ) as pre: pre.assert_not_called() post.assert_not_called() post_with_metadata.assert_not_called() - pb_message = catalog.CreateEntryRequest.pb(catalog.CreateEntryRequest()) + pb_message = catalog.ListEntriesRequest.pb(catalog.ListEntriesRequest()) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -20726,19 +22956,21 @@ def test_create_entry_rest_interceptors(null_interceptor): req.return_value = mock.Mock() req.return_value.status_code = 200 req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = catalog.Entry.to_json(catalog.Entry()) + return_value = catalog.ListEntriesResponse.to_json( + 
catalog.ListEntriesResponse() + ) req.return_value.content = return_value - request = catalog.CreateEntryRequest() + request = catalog.ListEntriesRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = catalog.Entry() - post_with_metadata.return_value = catalog.Entry(), metadata + post.return_value = catalog.ListEntriesResponse() + post_with_metadata.return_value = catalog.ListEntriesResponse(), metadata - client.create_entry( + client.list_entries( request, metadata=[ ("key", "val"), @@ -20751,15 +22983,13 @@ def test_create_entry_rest_interceptors(null_interceptor): post_with_metadata.assert_called_once() -def test_update_entry_rest_bad_request(request_type=catalog.UpdateEntryRequest): +def test_get_entry_rest_bad_request(request_type=catalog.GetEntryRequest): client = CatalogServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding request_init = { - "entry": { - "name": "projects/sample1/locations/sample2/entryGroups/sample3/entries/sample4" - } + "name": "projects/sample1/locations/sample2/entryGroups/sample3/entries/sample4" } request = request_type(**request_init) @@ -20775,115 +23005,25 @@ def test_update_entry_rest_bad_request(request_type=catalog.UpdateEntryRequest): response_value.request = mock.Mock() req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.update_entry(request) + client.get_entry(request) @pytest.mark.parametrize( "request_type", [ - catalog.UpdateEntryRequest, + catalog.GetEntryRequest, dict, ], ) -def test_update_entry_rest_call_success(request_type): +def test_get_entry_rest_call_success(request_type): client = CatalogServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding request_init = { - "entry": { - "name": 
"projects/sample1/locations/sample2/entryGroups/sample3/entries/sample4" - } - } - request_init["entry"] = { - "name": "projects/sample1/locations/sample2/entryGroups/sample3/entries/sample4", - "entry_type": "entry_type_value", - "create_time": {"seconds": 751, "nanos": 543}, - "update_time": {}, - "aspects": {}, - "parent_entry": "parent_entry_value", - "fully_qualified_name": "fully_qualified_name_value", - "entry_source": { - "resource": "resource_value", - "system": "system_value", - "platform": "platform_value", - "display_name": "display_name_value", - "description": "description_value", - "labels": {}, - "ancestors": [{"name": "name_value", "type_": "type__value"}], - "create_time": {}, - "update_time": {}, - "location": "location_value", - }, + "name": "projects/sample1/locations/sample2/entryGroups/sample3/entries/sample4" } - # The version of a generated dependency at test runtime may differ from the version used during generation. - # Delete any fields which are not present in the current runtime dependency - # See https://github.com/googleapis/gapic-generator-python/issues/1748 - - # Determine if the message type is proto-plus or protobuf - test_field = catalog.UpdateEntryRequest.meta.fields["entry"] - - def get_message_fields(field): - # Given a field which is a message (composite type), return a list with - # all the fields of the message. - # If the field is not a composite type, return an empty list. 
- message_fields = [] - - if hasattr(field, "message") and field.message: - is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") - - if is_field_type_proto_plus_type: - message_fields = field.message.meta.fields.values() - # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER - message_fields = field.message.DESCRIPTOR.fields - return message_fields - - runtime_nested_fields = [ - (field.name, nested_field.name) - for field in get_message_fields(test_field) - for nested_field in get_message_fields(field) - ] - - subfields_not_in_runtime = [] - - # For each item in the sample request, create a list of sub fields which are not present at runtime - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["entry"].items(): # pragma: NO COVER - result = None - is_repeated = False - # For repeated fields - if isinstance(value, list) and len(value): - is_repeated = True - result = value[0] - # For fields where the type is another message - if isinstance(value, dict): - result = value - - if result and hasattr(result, "keys"): - for subfield in result.keys(): - if (field, subfield) not in runtime_nested_fields: - subfields_not_in_runtime.append( - { - "field": field, - "subfield": subfield, - "is_repeated": is_repeated, - } - ) - - # Remove fields from the sample request which are not present in the runtime version of the dependency - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER - field = subfield_to_delete.get("field") - field_repeated = subfield_to_delete.get("is_repeated") - subfield = subfield_to_delete.get("subfield") - if subfield: - if field_repeated: - for i in range(0, len(request_init["entry"][field])): - del request_init["entry"][field][i][subfield] - else: - del 
request_init["entry"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. @@ -20906,7 +23046,7 @@ def get_message_fields(field): response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.update_entry(request) + response = client.get_entry(request) # Establish that the response is the type that we expect. assert isinstance(response, catalog.Entry) @@ -20917,7 +23057,7 @@ def get_message_fields(field): @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_update_entry_rest_interceptors(null_interceptor): +def test_get_entry_rest_interceptors(null_interceptor): transport = transports.CatalogServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -20931,16 +23071,16 @@ def test_update_entry_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.CatalogServiceRestInterceptor, "post_update_entry" + transports.CatalogServiceRestInterceptor, "post_get_entry" ) as post, mock.patch.object( - transports.CatalogServiceRestInterceptor, "post_update_entry_with_metadata" + transports.CatalogServiceRestInterceptor, "post_get_entry_with_metadata" ) as post_with_metadata, mock.patch.object( - transports.CatalogServiceRestInterceptor, "pre_update_entry" + transports.CatalogServiceRestInterceptor, "pre_get_entry" ) as pre: pre.assert_not_called() post.assert_not_called() post_with_metadata.assert_not_called() - pb_message = catalog.UpdateEntryRequest.pb(catalog.UpdateEntryRequest()) + pb_message = catalog.GetEntryRequest.pb(catalog.GetEntryRequest()) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -20954,7 +23094,7 @@ def test_update_entry_rest_interceptors(null_interceptor): return_value = catalog.Entry.to_json(catalog.Entry()) 
req.return_value.content = return_value - request = catalog.UpdateEntryRequest() + request = catalog.GetEntryRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), @@ -20963,7 +23103,7 @@ def test_update_entry_rest_interceptors(null_interceptor): post.return_value = catalog.Entry() post_with_metadata.return_value = catalog.Entry(), metadata - client.update_entry( + client.get_entry( request, metadata=[ ("key", "val"), @@ -20976,14 +23116,12 @@ def test_update_entry_rest_interceptors(null_interceptor): post_with_metadata.assert_called_once() -def test_delete_entry_rest_bad_request(request_type=catalog.DeleteEntryRequest): +def test_lookup_entry_rest_bad_request(request_type=catalog.LookupEntryRequest): client = CatalogServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = { - "name": "projects/sample1/locations/sample2/entryGroups/sample3/entries/sample4" - } + request_init = {"name": "projects/sample1/locations/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
@@ -20998,25 +23136,23 @@ def test_delete_entry_rest_bad_request(request_type=catalog.DeleteEntryRequest): response_value.request = mock.Mock() req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.delete_entry(request) + client.lookup_entry(request) @pytest.mark.parametrize( "request_type", [ - catalog.DeleteEntryRequest, + catalog.LookupEntryRequest, dict, ], ) -def test_delete_entry_rest_call_success(request_type): +def test_lookup_entry_rest_call_success(request_type): client = CatalogServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = { - "name": "projects/sample1/locations/sample2/entryGroups/sample3/entries/sample4" - } + request_init = {"name": "projects/sample1/locations/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. @@ -21039,7 +23175,7 @@ def test_delete_entry_rest_call_success(request_type): response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.delete_entry(request) + response = client.lookup_entry(request) # Establish that the response is the type that we expect. 
assert isinstance(response, catalog.Entry) @@ -21050,7 +23186,7 @@ def test_delete_entry_rest_call_success(request_type): @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_delete_entry_rest_interceptors(null_interceptor): +def test_lookup_entry_rest_interceptors(null_interceptor): transport = transports.CatalogServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -21064,16 +23200,16 @@ def test_delete_entry_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.CatalogServiceRestInterceptor, "post_delete_entry" + transports.CatalogServiceRestInterceptor, "post_lookup_entry" ) as post, mock.patch.object( - transports.CatalogServiceRestInterceptor, "post_delete_entry_with_metadata" + transports.CatalogServiceRestInterceptor, "post_lookup_entry_with_metadata" ) as post_with_metadata, mock.patch.object( - transports.CatalogServiceRestInterceptor, "pre_delete_entry" + transports.CatalogServiceRestInterceptor, "pre_lookup_entry" ) as pre: pre.assert_not_called() post.assert_not_called() post_with_metadata.assert_not_called() - pb_message = catalog.DeleteEntryRequest.pb(catalog.DeleteEntryRequest()) + pb_message = catalog.LookupEntryRequest.pb(catalog.LookupEntryRequest()) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -21087,7 +23223,7 @@ def test_delete_entry_rest_interceptors(null_interceptor): return_value = catalog.Entry.to_json(catalog.Entry()) req.return_value.content = return_value - request = catalog.DeleteEntryRequest() + request = catalog.LookupEntryRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), @@ -21096,7 +23232,7 @@ def test_delete_entry_rest_interceptors(null_interceptor): post.return_value = catalog.Entry() post_with_metadata.return_value = catalog.Entry(), metadata - client.delete_entry( + client.lookup_entry( request, metadata=[ ("key", "val"), @@ -21109,12 +23245,12 @@ def 
test_delete_entry_rest_interceptors(null_interceptor): post_with_metadata.assert_called_once() -def test_list_entries_rest_bad_request(request_type=catalog.ListEntriesRequest): +def test_search_entries_rest_bad_request(request_type=catalog.SearchEntriesRequest): client = CatalogServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2/entryGroups/sample3"} + request_init = {"name": "projects/sample1/locations/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -21129,30 +23265,32 @@ def test_list_entries_rest_bad_request(request_type=catalog.ListEntriesRequest): response_value.request = mock.Mock() req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.list_entries(request) + client.search_entries(request) @pytest.mark.parametrize( "request_type", [ - catalog.ListEntriesRequest, + catalog.SearchEntriesRequest, dict, ], ) -def test_list_entries_rest_call_success(request_type): +def test_search_entries_rest_call_success(request_type): client = CatalogServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2/entryGroups/sample3"} + request_init = {"name": "projects/sample1/locations/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = catalog.ListEntriesResponse( + return_value = catalog.SearchEntriesResponse( + total_size=1086, next_page_token="next_page_token_value", + unreachable=["unreachable_value"], ) # Wrap the value into a proper Response obj @@ -21160,20 +23298,22 @@ def test_list_entries_rest_call_success(request_type): response_value.status_code = 200 # Convert return value to protobuf type - return_value = catalog.ListEntriesResponse.pb(return_value) + return_value = catalog.SearchEntriesResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.list_entries(request) + response = client.search_entries(request) # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListEntriesPager) + assert isinstance(response, pagers.SearchEntriesPager) + assert response.total_size == 1086 assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_entries_rest_interceptors(null_interceptor): +def test_search_entries_rest_interceptors(null_interceptor): transport = transports.CatalogServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -21187,16 +23327,16 @@ def test_list_entries_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.CatalogServiceRestInterceptor, "post_list_entries" + transports.CatalogServiceRestInterceptor, "post_search_entries" ) as post, mock.patch.object( - transports.CatalogServiceRestInterceptor, "post_list_entries_with_metadata" + transports.CatalogServiceRestInterceptor, "post_search_entries_with_metadata" ) as post_with_metadata, mock.patch.object( - 
transports.CatalogServiceRestInterceptor, "pre_list_entries" + transports.CatalogServiceRestInterceptor, "pre_search_entries" ) as pre: pre.assert_not_called() post.assert_not_called() post_with_metadata.assert_not_called() - pb_message = catalog.ListEntriesRequest.pb(catalog.ListEntriesRequest()) + pb_message = catalog.SearchEntriesRequest.pb(catalog.SearchEntriesRequest()) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -21207,21 +23347,21 @@ def test_list_entries_rest_interceptors(null_interceptor): req.return_value = mock.Mock() req.return_value.status_code = 200 req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = catalog.ListEntriesResponse.to_json( - catalog.ListEntriesResponse() + return_value = catalog.SearchEntriesResponse.to_json( + catalog.SearchEntriesResponse() ) req.return_value.content = return_value - request = catalog.ListEntriesRequest() + request = catalog.SearchEntriesRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = catalog.ListEntriesResponse() - post_with_metadata.return_value = catalog.ListEntriesResponse(), metadata + post.return_value = catalog.SearchEntriesResponse() + post_with_metadata.return_value = catalog.SearchEntriesResponse(), metadata - client.list_entries( + client.search_entries( request, metadata=[ ("key", "val"), @@ -21234,14 +23374,14 @@ def test_list_entries_rest_interceptors(null_interceptor): post_with_metadata.assert_called_once() -def test_get_entry_rest_bad_request(request_type=catalog.GetEntryRequest): +def test_create_metadata_job_rest_bad_request( + request_type=catalog.CreateMetadataJobRequest, +): client = CatalogServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = { - "name": "projects/sample1/locations/sample2/entryGroups/sample3/entries/sample4" - } + request_init = {"parent": 
"projects/sample1/locations/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -21256,59 +23396,172 @@ def test_get_entry_rest_bad_request(request_type=catalog.GetEntryRequest): response_value.request = mock.Mock() req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.get_entry(request) + client.create_metadata_job(request) @pytest.mark.parametrize( "request_type", [ - catalog.GetEntryRequest, + catalog.CreateMetadataJobRequest, dict, ], ) -def test_get_entry_rest_call_success(request_type): +def test_create_metadata_job_rest_call_success(request_type): client = CatalogServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = { - "name": "projects/sample1/locations/sample2/entryGroups/sample3/entries/sample4" + request_init = {"parent": "projects/sample1/locations/sample2"} + request_init["metadata_job"] = { + "name": "name_value", + "uid": "uid_value", + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "labels": {}, + "type_": 1, + "import_spec": { + "source_storage_uri": "source_storage_uri_value", + "source_create_time": {}, + "scope": { + "entry_groups": ["entry_groups_value1", "entry_groups_value2"], + "entry_types": ["entry_types_value1", "entry_types_value2"], + "aspect_types": ["aspect_types_value1", "aspect_types_value2"], + "glossaries": ["glossaries_value1", "glossaries_value2"], + "entry_link_types": [ + "entry_link_types_value1", + "entry_link_types_value2", + ], + "referenced_entry_scopes": [ + "referenced_entry_scopes_value1", + "referenced_entry_scopes_value2", + ], + }, + "entry_sync_mode": 1, + "aspect_sync_mode": 1, + "log_level": 1, + }, + "export_spec": { + "scope": { + "organization_level": True, + "projects": ["projects_value1", "projects_value2"], + "entry_groups": ["entry_groups_value1", 
"entry_groups_value2"], + "entry_types": ["entry_types_value1", "entry_types_value2"], + "aspect_types": ["aspect_types_value1", "aspect_types_value2"], + }, + "output_path": "output_path_value", + }, + "import_result": { + "deleted_entries": 1584, + "updated_entries": 1600, + "created_entries": 1585, + "unchanged_entries": 1798, + "recreated_entries": 1800, + "update_time": {}, + "deleted_entry_links": 2024, + "created_entry_links": 2025, + "unchanged_entry_links": 2238, + }, + "export_result": { + "exported_entries": 1732, + "error_message": "error_message_value", + }, + "status": { + "state": 1, + "message": "message_value", + "completion_percent": 1930, + "update_time": {}, + }, } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = catalog.CreateMetadataJobRequest.meta.fields["metadata_job"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["metadata_job"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["metadata_job"][field])): + del request_init["metadata_job"][field][i][subfield] + else: + del 
request_init["metadata_job"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = catalog.Entry( - name="name_value", - entry_type="entry_type_value", - parent_entry="parent_entry_value", - fully_qualified_name="fully_qualified_name_value", - ) + return_value = operations_pb2.Operation(name="operations/spam") # Wrap the value into a proper Response obj response_value = mock.Mock() response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = catalog.Entry.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.get_entry(request) + response = client.create_metadata_job(request) # Establish that the response is the type that we expect. 
- assert isinstance(response, catalog.Entry) - assert response.name == "name_value" - assert response.entry_type == "entry_type_value" - assert response.parent_entry == "parent_entry_value" - assert response.fully_qualified_name == "fully_qualified_name_value" + json_return_value = json_format.MessageToJson(return_value) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_entry_rest_interceptors(null_interceptor): +def test_create_metadata_job_rest_interceptors(null_interceptor): transport = transports.CatalogServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -21322,16 +23575,21 @@ def test_get_entry_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.CatalogServiceRestInterceptor, "post_get_entry" + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.CatalogServiceRestInterceptor, "post_create_metadata_job" ) as post, mock.patch.object( - transports.CatalogServiceRestInterceptor, "post_get_entry_with_metadata" + transports.CatalogServiceRestInterceptor, + "post_create_metadata_job_with_metadata", ) as post_with_metadata, mock.patch.object( - transports.CatalogServiceRestInterceptor, "pre_get_entry" + transports.CatalogServiceRestInterceptor, "pre_create_metadata_job" ) as pre: pre.assert_not_called() post.assert_not_called() post_with_metadata.assert_not_called() - pb_message = catalog.GetEntryRequest.pb(catalog.GetEntryRequest()) + pb_message = catalog.CreateMetadataJobRequest.pb( + catalog.CreateMetadataJobRequest() + ) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -21342,19 +23600,19 @@ def test_get_entry_rest_interceptors(null_interceptor): req.return_value = mock.Mock() req.return_value.status_code = 200 req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = catalog.Entry.to_json(catalog.Entry()) + return_value = 
json_format.MessageToJson(operations_pb2.Operation()) req.return_value.content = return_value - request = catalog.GetEntryRequest() + request = catalog.CreateMetadataJobRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = catalog.Entry() - post_with_metadata.return_value = catalog.Entry(), metadata + post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata - client.get_entry( + client.create_metadata_job( request, metadata=[ ("key", "val"), @@ -21367,12 +23625,12 @@ def test_get_entry_rest_interceptors(null_interceptor): post_with_metadata.assert_called_once() -def test_lookup_entry_rest_bad_request(request_type=catalog.LookupEntryRequest): +def test_get_metadata_job_rest_bad_request(request_type=catalog.GetMetadataJobRequest): client = CatalogServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {"name": "projects/sample1/locations/sample2"} + request_init = {"name": "projects/sample1/locations/sample2/metadataJobs/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
@@ -21387,33 +23645,32 @@ def test_lookup_entry_rest_bad_request(request_type=catalog.LookupEntryRequest): response_value.request = mock.Mock() req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.lookup_entry(request) + client.get_metadata_job(request) @pytest.mark.parametrize( "request_type", [ - catalog.LookupEntryRequest, + catalog.GetMetadataJobRequest, dict, ], ) -def test_lookup_entry_rest_call_success(request_type): +def test_get_metadata_job_rest_call_success(request_type): client = CatalogServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {"name": "projects/sample1/locations/sample2"} + request_init = {"name": "projects/sample1/locations/sample2/metadataJobs/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = catalog.Entry( + return_value = catalog.MetadataJob( name="name_value", - entry_type="entry_type_value", - parent_entry="parent_entry_value", - fully_qualified_name="fully_qualified_name_value", + uid="uid_value", + type_=catalog.MetadataJob.Type.IMPORT, ) # Wrap the value into a proper Response obj @@ -21421,23 +23678,22 @@ def test_lookup_entry_rest_call_success(request_type): response_value.status_code = 200 # Convert return value to protobuf type - return_value = catalog.Entry.pb(return_value) + return_value = catalog.MetadataJob.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.lookup_entry(request) + response = client.get_metadata_job(request) # Establish that the response is the type that we expect. - assert isinstance(response, catalog.Entry) + assert isinstance(response, catalog.MetadataJob) assert response.name == "name_value" - assert response.entry_type == "entry_type_value" - assert response.parent_entry == "parent_entry_value" - assert response.fully_qualified_name == "fully_qualified_name_value" + assert response.uid == "uid_value" + assert response.type_ == catalog.MetadataJob.Type.IMPORT @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_lookup_entry_rest_interceptors(null_interceptor): +def test_get_metadata_job_rest_interceptors(null_interceptor): transport = transports.CatalogServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -21451,16 +23707,16 @@ def test_lookup_entry_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.CatalogServiceRestInterceptor, "post_lookup_entry" + transports.CatalogServiceRestInterceptor, "post_get_metadata_job" ) as post, mock.patch.object( - 
transports.CatalogServiceRestInterceptor, "post_lookup_entry_with_metadata" + transports.CatalogServiceRestInterceptor, "post_get_metadata_job_with_metadata" ) as post_with_metadata, mock.patch.object( - transports.CatalogServiceRestInterceptor, "pre_lookup_entry" + transports.CatalogServiceRestInterceptor, "pre_get_metadata_job" ) as pre: pre.assert_not_called() post.assert_not_called() post_with_metadata.assert_not_called() - pb_message = catalog.LookupEntryRequest.pb(catalog.LookupEntryRequest()) + pb_message = catalog.GetMetadataJobRequest.pb(catalog.GetMetadataJobRequest()) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -21471,19 +23727,19 @@ def test_lookup_entry_rest_interceptors(null_interceptor): req.return_value = mock.Mock() req.return_value.status_code = 200 req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = catalog.Entry.to_json(catalog.Entry()) + return_value = catalog.MetadataJob.to_json(catalog.MetadataJob()) req.return_value.content = return_value - request = catalog.LookupEntryRequest() + request = catalog.GetMetadataJobRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = catalog.Entry() - post_with_metadata.return_value = catalog.Entry(), metadata + post.return_value = catalog.MetadataJob() + post_with_metadata.return_value = catalog.MetadataJob(), metadata - client.lookup_entry( + client.get_metadata_job( request, metadata=[ ("key", "val"), @@ -21496,12 +23752,14 @@ def test_lookup_entry_rest_interceptors(null_interceptor): post_with_metadata.assert_called_once() -def test_search_entries_rest_bad_request(request_type=catalog.SearchEntriesRequest): +def test_list_metadata_jobs_rest_bad_request( + request_type=catalog.ListMetadataJobsRequest, +): client = CatalogServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {"name": 
"projects/sample1/locations/sample2"} + request_init = {"parent": "projects/sample1/locations/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -21516,32 +23774,31 @@ def test_search_entries_rest_bad_request(request_type=catalog.SearchEntriesReque response_value.request = mock.Mock() req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.search_entries(request) + client.list_metadata_jobs(request) @pytest.mark.parametrize( "request_type", [ - catalog.SearchEntriesRequest, + catalog.ListMetadataJobsRequest, dict, ], ) -def test_search_entries_rest_call_success(request_type): +def test_list_metadata_jobs_rest_call_success(request_type): client = CatalogServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {"name": "projects/sample1/locations/sample2"} + request_init = {"parent": "projects/sample1/locations/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = catalog.SearchEntriesResponse( - total_size=1086, + # Designate an appropriate value for the returned response. 
+ return_value = catalog.ListMetadataJobsResponse( next_page_token="next_page_token_value", - unreachable=["unreachable_value"], + unreachable_locations=["unreachable_locations_value"], ) # Wrap the value into a proper Response obj @@ -21549,22 +23806,21 @@ def test_search_entries_rest_call_success(request_type): response_value.status_code = 200 # Convert return value to protobuf type - return_value = catalog.SearchEntriesResponse.pb(return_value) + return_value = catalog.ListMetadataJobsResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.search_entries(request) + response = client.list_metadata_jobs(request) # Establish that the response is the type that we expect. - assert isinstance(response, pagers.SearchEntriesPager) - assert response.total_size == 1086 + assert isinstance(response, pagers.ListMetadataJobsPager) assert response.next_page_token == "next_page_token_value" - assert response.unreachable == ["unreachable_value"] + assert response.unreachable_locations == ["unreachable_locations_value"] @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_search_entries_rest_interceptors(null_interceptor): +def test_list_metadata_jobs_rest_interceptors(null_interceptor): transport = transports.CatalogServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -21578,16 +23834,19 @@ def test_search_entries_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.CatalogServiceRestInterceptor, "post_search_entries" + transports.CatalogServiceRestInterceptor, "post_list_metadata_jobs" ) as post, mock.patch.object( - transports.CatalogServiceRestInterceptor, "post_search_entries_with_metadata" + 
transports.CatalogServiceRestInterceptor, + "post_list_metadata_jobs_with_metadata", ) as post_with_metadata, mock.patch.object( - transports.CatalogServiceRestInterceptor, "pre_search_entries" + transports.CatalogServiceRestInterceptor, "pre_list_metadata_jobs" ) as pre: pre.assert_not_called() post.assert_not_called() post_with_metadata.assert_not_called() - pb_message = catalog.SearchEntriesRequest.pb(catalog.SearchEntriesRequest()) + pb_message = catalog.ListMetadataJobsRequest.pb( + catalog.ListMetadataJobsRequest() + ) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -21598,21 +23857,21 @@ def test_search_entries_rest_interceptors(null_interceptor): req.return_value = mock.Mock() req.return_value.status_code = 200 req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = catalog.SearchEntriesResponse.to_json( - catalog.SearchEntriesResponse() + return_value = catalog.ListMetadataJobsResponse.to_json( + catalog.ListMetadataJobsResponse() ) req.return_value.content = return_value - request = catalog.SearchEntriesRequest() + request = catalog.ListMetadataJobsRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = catalog.SearchEntriesResponse() - post_with_metadata.return_value = catalog.SearchEntriesResponse(), metadata + post.return_value = catalog.ListMetadataJobsResponse() + post_with_metadata.return_value = catalog.ListMetadataJobsResponse(), metadata - client.search_entries( + client.list_metadata_jobs( request, metadata=[ ("key", "val"), @@ -21625,14 +23884,14 @@ def test_search_entries_rest_interceptors(null_interceptor): post_with_metadata.assert_called_once() -def test_create_metadata_job_rest_bad_request( - request_type=catalog.CreateMetadataJobRequest, +def test_cancel_metadata_job_rest_bad_request( + request_type=catalog.CancelMetadataJobRequest, ): client = CatalogServiceClient( credentials=ga_credentials.AnonymousCredentials(), 
transport="rest" ) # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} + request_init = {"name": "projects/sample1/locations/sample2/metadataJobs/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -21647,160 +23906,45 @@ def test_create_metadata_job_rest_bad_request( response_value.request = mock.Mock() req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.create_metadata_job(request) + client.cancel_metadata_job(request) @pytest.mark.parametrize( "request_type", [ - catalog.CreateMetadataJobRequest, + catalog.CancelMetadataJobRequest, dict, ], ) -def test_create_metadata_job_rest_call_success(request_type): +def test_cancel_metadata_job_rest_call_success(request_type): client = CatalogServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} - request_init["metadata_job"] = { - "name": "name_value", - "uid": "uid_value", - "create_time": {"seconds": 751, "nanos": 543}, - "update_time": {}, - "labels": {}, - "type_": 1, - "import_spec": { - "source_storage_uri": "source_storage_uri_value", - "source_create_time": {}, - "scope": { - "entry_groups": ["entry_groups_value1", "entry_groups_value2"], - "entry_types": ["entry_types_value1", "entry_types_value2"], - "aspect_types": ["aspect_types_value1", "aspect_types_value2"], - }, - "entry_sync_mode": 1, - "aspect_sync_mode": 1, - "log_level": 1, - }, - "export_spec": { - "scope": { - "organization_level": True, - "projects": ["projects_value1", "projects_value2"], - "entry_groups": ["entry_groups_value1", "entry_groups_value2"], - "entry_types": ["entry_types_value1", "entry_types_value2"], - "aspect_types": ["aspect_types_value1", "aspect_types_value2"], - }, - "output_path": 
"output_path_value", - }, - "import_result": { - "deleted_entries": 1584, - "updated_entries": 1600, - "created_entries": 1585, - "unchanged_entries": 1798, - "recreated_entries": 1800, - "update_time": {}, - }, - "export_result": { - "exported_entries": 1732, - "error_message": "error_message_value", - }, - "status": { - "state": 1, - "message": "message_value", - "completion_percent": 1930, - "update_time": {}, - }, - } - # The version of a generated dependency at test runtime may differ from the version used during generation. - # Delete any fields which are not present in the current runtime dependency - # See https://github.com/googleapis/gapic-generator-python/issues/1748 - - # Determine if the message type is proto-plus or protobuf - test_field = catalog.CreateMetadataJobRequest.meta.fields["metadata_job"] - - def get_message_fields(field): - # Given a field which is a message (composite type), return a list with - # all the fields of the message. - # If the field is not a composite type, return an empty list. 
- message_fields = [] - - if hasattr(field, "message") and field.message: - is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") - - if is_field_type_proto_plus_type: - message_fields = field.message.meta.fields.values() - # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER - message_fields = field.message.DESCRIPTOR.fields - return message_fields - - runtime_nested_fields = [ - (field.name, nested_field.name) - for field in get_message_fields(test_field) - for nested_field in get_message_fields(field) - ] - - subfields_not_in_runtime = [] - - # For each item in the sample request, create a list of sub fields which are not present at runtime - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["metadata_job"].items(): # pragma: NO COVER - result = None - is_repeated = False - # For repeated fields - if isinstance(value, list) and len(value): - is_repeated = True - result = value[0] - # For fields where the type is another message - if isinstance(value, dict): - result = value - - if result and hasattr(result, "keys"): - for subfield in result.keys(): - if (field, subfield) not in runtime_nested_fields: - subfields_not_in_runtime.append( - { - "field": field, - "subfield": subfield, - "is_repeated": is_repeated, - } - ) - - # Remove fields from the sample request which are not present in the runtime version of the dependency - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER - field = subfield_to_delete.get("field") - field_repeated = subfield_to_delete.get("is_repeated") - subfield = subfield_to_delete.get("subfield") - if subfield: - if field_repeated: - for i in range(0, len(request_init["metadata_job"][field])): - del request_init["metadata_job"][field][i][subfield] - else: - del 
request_init["metadata_job"][field][subfield] + request_init = {"name": "projects/sample1/locations/sample2/metadataJobs/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") + return_value = None # Wrap the value into a proper Response obj response_value = mock.Mock() response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) + json_return_value = "" response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.create_metadata_job(request) + response = client.cancel_metadata_job(request) # Establish that the response is the type that we expect. - json_return_value = json_format.MessageToJson(return_value) + assert response is None @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_create_metadata_job_rest_interceptors(null_interceptor): +def test_cancel_metadata_job_rest_interceptors(null_interceptor): transport = transports.CatalogServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -21814,20 +23958,11 @@ def test_create_metadata_job_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - operation.Operation, "_set_result_from_operation" - ), mock.patch.object( - transports.CatalogServiceRestInterceptor, "post_create_metadata_job" - ) as post, mock.patch.object( - transports.CatalogServiceRestInterceptor, - "post_create_metadata_job_with_metadata", - ) as post_with_metadata, mock.patch.object( - transports.CatalogServiceRestInterceptor, "pre_create_metadata_job" + transports.CatalogServiceRestInterceptor, 
"pre_cancel_metadata_job" ) as pre: pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = catalog.CreateMetadataJobRequest.pb( - catalog.CreateMetadataJobRequest() + pb_message = catalog.CancelMetadataJobRequest.pb( + catalog.CancelMetadataJobRequest() ) transcode.return_value = { "method": "post", @@ -21839,19 +23974,15 @@ def test_create_metadata_job_rest_interceptors(null_interceptor): req.return_value = mock.Mock() req.return_value.status_code = 200 req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = json_format.MessageToJson(operations_pb2.Operation()) - req.return_value.content = return_value - request = catalog.CreateMetadataJobRequest() + request = catalog.CancelMetadataJobRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = operations_pb2.Operation() - post_with_metadata.return_value = operations_pb2.Operation(), metadata - client.create_metadata_job( + client.cancel_metadata_job( request, metadata=[ ("key", "val"), @@ -21860,56 +23991,129 @@ def test_create_metadata_job_rest_interceptors(null_interceptor): ) pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() -def test_get_metadata_job_rest_bad_request(request_type=catalog.GetMetadataJobRequest): +def test_create_entry_link_rest_bad_request( + request_type=catalog.CreateEntryLinkRequest, +): client = CatalogServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {"name": "projects/sample1/locations/sample2/metadataJobs/sample3"} + request_init = {"parent": "projects/sample1/locations/sample2/entryGroups/sample3"} request = request_type(**request_init) - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = "" - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.get_metadata_job(request) + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.create_entry_link(request) + + +@pytest.mark.parametrize( + "request_type", + [ + catalog.CreateEntryLinkRequest, + dict, + ], +) +def test_create_entry_link_rest_call_success(request_type): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2/entryGroups/sample3"} + request_init["entry_link"] = { + "name": "name_value", + "entry_link_type": "entry_link_type_value", + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "entry_references": [{"name": "name_value", "path": "path_value", "type_": 2}], + } + # The version of a generated dependency at test runtime may differ from the version used during generation. 
+ # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = catalog.CreateEntryLinkRequest.meta.fields["entry_link"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + subfields_not_in_runtime = [] -@pytest.mark.parametrize( - "request_type", - [ - catalog.GetMetadataJobRequest, - dict, - ], -) -def test_get_metadata_job_rest_call_success(request_type): - client = CatalogServiceClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["entry_link"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value - # send a request that will satisfy transcoding - request_init = {"name": 
"projects/sample1/locations/sample2/metadataJobs/sample3"} + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["entry_link"][field])): + del request_init["entry_link"][field][i][subfield] + else: + del request_init["entry_link"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = catalog.MetadataJob( + return_value = catalog.EntryLink( name="name_value", - uid="uid_value", - type_=catalog.MetadataJob.Type.IMPORT, + entry_link_type="entry_link_type_value", ) # Wrap the value into a proper Response obj @@ -21917,22 +24121,21 @@ def test_get_metadata_job_rest_call_success(request_type): response_value.status_code = 200 # Convert return value to protobuf type - return_value = catalog.MetadataJob.pb(return_value) + return_value = catalog.EntryLink.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.get_metadata_job(request) + response = client.create_entry_link(request) # Establish that the response is the type that we expect. - assert isinstance(response, catalog.MetadataJob) + assert isinstance(response, catalog.EntryLink) assert response.name == "name_value" - assert response.uid == "uid_value" - assert response.type_ == catalog.MetadataJob.Type.IMPORT + assert response.entry_link_type == "entry_link_type_value" @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_metadata_job_rest_interceptors(null_interceptor): +def test_create_entry_link_rest_interceptors(null_interceptor): transport = transports.CatalogServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -21946,16 +24149,16 @@ def test_get_metadata_job_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.CatalogServiceRestInterceptor, "post_get_metadata_job" + transports.CatalogServiceRestInterceptor, "post_create_entry_link" ) as post, mock.patch.object( - transports.CatalogServiceRestInterceptor, "post_get_metadata_job_with_metadata" + transports.CatalogServiceRestInterceptor, "post_create_entry_link_with_metadata" 
) as post_with_metadata, mock.patch.object( - transports.CatalogServiceRestInterceptor, "pre_get_metadata_job" + transports.CatalogServiceRestInterceptor, "pre_create_entry_link" ) as pre: pre.assert_not_called() post.assert_not_called() post_with_metadata.assert_not_called() - pb_message = catalog.GetMetadataJobRequest.pb(catalog.GetMetadataJobRequest()) + pb_message = catalog.CreateEntryLinkRequest.pb(catalog.CreateEntryLinkRequest()) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -21966,19 +24169,19 @@ def test_get_metadata_job_rest_interceptors(null_interceptor): req.return_value = mock.Mock() req.return_value.status_code = 200 req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = catalog.MetadataJob.to_json(catalog.MetadataJob()) + return_value = catalog.EntryLink.to_json(catalog.EntryLink()) req.return_value.content = return_value - request = catalog.GetMetadataJobRequest() + request = catalog.CreateEntryLinkRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = catalog.MetadataJob() - post_with_metadata.return_value = catalog.MetadataJob(), metadata + post.return_value = catalog.EntryLink() + post_with_metadata.return_value = catalog.EntryLink(), metadata - client.get_metadata_job( + client.create_entry_link( request, metadata=[ ("key", "val"), @@ -21991,14 +24194,16 @@ def test_get_metadata_job_rest_interceptors(null_interceptor): post_with_metadata.assert_called_once() -def test_list_metadata_jobs_rest_bad_request( - request_type=catalog.ListMetadataJobsRequest, +def test_delete_entry_link_rest_bad_request( + request_type=catalog.DeleteEntryLinkRequest, ): client = CatalogServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} + request_init = { + "name": 
"projects/sample1/locations/sample2/entryGroups/sample3/entryLinks/sample4" + } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -22013,31 +24218,33 @@ def test_list_metadata_jobs_rest_bad_request( response_value.request = mock.Mock() req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.list_metadata_jobs(request) + client.delete_entry_link(request) @pytest.mark.parametrize( "request_type", [ - catalog.ListMetadataJobsRequest, + catalog.DeleteEntryLinkRequest, dict, ], ) -def test_list_metadata_jobs_rest_call_success(request_type): +def test_delete_entry_link_rest_call_success(request_type): client = CatalogServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} + request_init = { + "name": "projects/sample1/locations/sample2/entryGroups/sample3/entryLinks/sample4" + } request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = catalog.ListMetadataJobsResponse( - next_page_token="next_page_token_value", - unreachable_locations=["unreachable_locations_value"], + return_value = catalog.EntryLink( + name="name_value", + entry_link_type="entry_link_type_value", ) # Wrap the value into a proper Response obj @@ -22045,21 +24252,21 @@ def test_list_metadata_jobs_rest_call_success(request_type): response_value.status_code = 200 # Convert return value to protobuf type - return_value = catalog.ListMetadataJobsResponse.pb(return_value) + return_value = catalog.EntryLink.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.list_metadata_jobs(request) + response = client.delete_entry_link(request) # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListMetadataJobsPager) - assert response.next_page_token == "next_page_token_value" - assert response.unreachable_locations == ["unreachable_locations_value"] + assert isinstance(response, catalog.EntryLink) + assert response.name == "name_value" + assert response.entry_link_type == "entry_link_type_value" @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_metadata_jobs_rest_interceptors(null_interceptor): +def test_delete_entry_link_rest_interceptors(null_interceptor): transport = transports.CatalogServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -22073,19 +24280,16 @@ def test_list_metadata_jobs_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.CatalogServiceRestInterceptor, "post_list_metadata_jobs" + transports.CatalogServiceRestInterceptor, "post_delete_entry_link" ) as post, mock.patch.object( - 
transports.CatalogServiceRestInterceptor, - "post_list_metadata_jobs_with_metadata", + transports.CatalogServiceRestInterceptor, "post_delete_entry_link_with_metadata" ) as post_with_metadata, mock.patch.object( - transports.CatalogServiceRestInterceptor, "pre_list_metadata_jobs" + transports.CatalogServiceRestInterceptor, "pre_delete_entry_link" ) as pre: pre.assert_not_called() post.assert_not_called() post_with_metadata.assert_not_called() - pb_message = catalog.ListMetadataJobsRequest.pb( - catalog.ListMetadataJobsRequest() - ) + pb_message = catalog.DeleteEntryLinkRequest.pb(catalog.DeleteEntryLinkRequest()) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -22096,21 +24300,19 @@ def test_list_metadata_jobs_rest_interceptors(null_interceptor): req.return_value = mock.Mock() req.return_value.status_code = 200 req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = catalog.ListMetadataJobsResponse.to_json( - catalog.ListMetadataJobsResponse() - ) + return_value = catalog.EntryLink.to_json(catalog.EntryLink()) req.return_value.content = return_value - request = catalog.ListMetadataJobsRequest() + request = catalog.DeleteEntryLinkRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = catalog.ListMetadataJobsResponse() - post_with_metadata.return_value = catalog.ListMetadataJobsResponse(), metadata + post.return_value = catalog.EntryLink() + post_with_metadata.return_value = catalog.EntryLink(), metadata - client.list_metadata_jobs( + client.delete_entry_link( request, metadata=[ ("key", "val"), @@ -22123,14 +24325,14 @@ def test_list_metadata_jobs_rest_interceptors(null_interceptor): post_with_metadata.assert_called_once() -def test_cancel_metadata_job_rest_bad_request( - request_type=catalog.CancelMetadataJobRequest, -): +def test_get_entry_link_rest_bad_request(request_type=catalog.GetEntryLinkRequest): client = CatalogServiceClient( 
credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {"name": "projects/sample1/locations/sample2/metadataJobs/sample3"} + request_init = { + "name": "projects/sample1/locations/sample2/entryGroups/sample3/entryLinks/sample4" + } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -22145,45 +24347,55 @@ def test_cancel_metadata_job_rest_bad_request( response_value.request = mock.Mock() req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.cancel_metadata_job(request) + client.get_entry_link(request) @pytest.mark.parametrize( "request_type", [ - catalog.CancelMetadataJobRequest, + catalog.GetEntryLinkRequest, dict, ], ) -def test_cancel_metadata_job_rest_call_success(request_type): +def test_get_entry_link_rest_call_success(request_type): client = CatalogServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {"name": "projects/sample1/locations/sample2/metadataJobs/sample3"} + request_init = { + "name": "projects/sample1/locations/sample2/entryGroups/sample3/entryLinks/sample4" + } request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = None + return_value = catalog.EntryLink( + name="name_value", + entry_link_type="entry_link_type_value", + ) # Wrap the value into a proper Response obj response_value = mock.Mock() response_value.status_code = 200 - json_return_value = "" + + # Convert return value to protobuf type + return_value = catalog.EntryLink.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.cancel_metadata_job(request) + response = client.get_entry_link(request) # Establish that the response is the type that we expect. - assert response is None + assert isinstance(response, catalog.EntryLink) + assert response.name == "name_value" + assert response.entry_link_type == "entry_link_type_value" @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_cancel_metadata_job_rest_interceptors(null_interceptor): +def test_get_entry_link_rest_interceptors(null_interceptor): transport = transports.CatalogServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -22197,12 +24409,16 @@ def test_cancel_metadata_job_rest_interceptors(null_interceptor): ) as req, mock.patch.object( path_template, "transcode" ) as transcode, mock.patch.object( - transports.CatalogServiceRestInterceptor, "pre_cancel_metadata_job" + transports.CatalogServiceRestInterceptor, "post_get_entry_link" + ) as post, mock.patch.object( + transports.CatalogServiceRestInterceptor, "post_get_entry_link_with_metadata" + ) as post_with_metadata, mock.patch.object( + transports.CatalogServiceRestInterceptor, "pre_get_entry_link" ) as pre: pre.assert_not_called() - pb_message = catalog.CancelMetadataJobRequest.pb( - catalog.CancelMetadataJobRequest() - ) + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = 
catalog.GetEntryLinkRequest.pb(catalog.GetEntryLinkRequest()) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -22213,15 +24429,19 @@ def test_cancel_metadata_job_rest_interceptors(null_interceptor): req.return_value = mock.Mock() req.return_value.status_code = 200 req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = catalog.EntryLink.to_json(catalog.EntryLink()) + req.return_value.content = return_value - request = catalog.CancelMetadataJobRequest() + request = catalog.GetEntryLinkRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata + post.return_value = catalog.EntryLink() + post_with_metadata.return_value = catalog.EntryLink(), metadata - client.cancel_metadata_job( + client.get_entry_link( request, metadata=[ ("key", "val"), @@ -22230,6 +24450,8 @@ def test_cancel_metadata_job_rest_interceptors(null_interceptor): ) pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_location_rest_bad_request(request_type=locations_pb2.GetLocationRequest): @@ -23155,6 +25377,70 @@ def test_cancel_metadata_job_empty_call_rest(): assert args[0] == request_msg +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_create_entry_link_empty_call_rest(): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.create_entry_link), "__call__" + ) as call: + client.create_entry_link(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = catalog.CreateEntryLinkRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. 
request == None and no flattened fields passed, work. +def test_delete_entry_link_empty_call_rest(): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.delete_entry_link), "__call__" + ) as call: + client.delete_entry_link(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = catalog.DeleteEntryLinkRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_get_entry_link_empty_call_rest(): + client = CatalogServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.get_entry_link), "__call__") as call: + client.get_entry_link(request=None) + + # Establish that the underlying stub method was called. 
+ call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = catalog.GetEntryLinkRequest() + + assert args[0] == request_msg + + def test_catalog_service_rest_lro_client(): client = CatalogServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -23231,6 +25517,9 @@ def test_catalog_service_base_transport(): "get_metadata_job", "list_metadata_jobs", "cancel_metadata_job", + "create_entry_link", + "delete_entry_link", + "get_entry_link", "get_location", "list_locations", "get_operation", @@ -23578,6 +25867,15 @@ def test_catalog_service_client_transport_session_collision(transport_name): session1 = client1.transport.cancel_metadata_job._session session2 = client2.transport.cancel_metadata_job._session assert session1 != session2 + session1 = client1.transport.create_entry_link._session + session2 = client2.transport.create_entry_link._session + assert session1 != session2 + session1 = client1.transport.delete_entry_link._session + session2 = client2.transport.delete_entry_link._session + assert session1 != session2 + session1 = client1.transport.get_entry_link._session + session2 = client2.transport.get_entry_link._session + assert session1 != session2 def test_catalog_service_grpc_transport_channel(): @@ -23825,10 +26123,41 @@ def test_parse_entry_group_path(): assert expected == actual -def test_entry_type_path(): +def test_entry_link_path(): project = "winkle" location = "nautilus" - entry_type = "scallop" + entry_group = "scallop" + entry_link = "abalone" + expected = "projects/{project}/locations/{location}/entryGroups/{entry_group}/entryLinks/{entry_link}".format( + project=project, + location=location, + entry_group=entry_group, + entry_link=entry_link, + ) + actual = CatalogServiceClient.entry_link_path( + project, location, entry_group, entry_link + ) + assert expected == actual + + +def test_parse_entry_link_path(): + expected = { + "project": "squid", + "location": "clam", + "entry_group": "whelk", + "entry_link": "octopus", + } + 
path = CatalogServiceClient.entry_link_path(**expected) + + # Check that the path construction is reversible. + actual = CatalogServiceClient.parse_entry_link_path(path) + assert expected == actual + + +def test_entry_type_path(): + project = "oyster" + location = "nudibranch" + entry_type = "cuttlefish" expected = "projects/{project}/locations/{location}/entryTypes/{entry_type}".format( project=project, location=location, @@ -23840,9 +26169,9 @@ def test_entry_type_path(): def test_parse_entry_type_path(): expected = { - "project": "abalone", - "location": "squid", - "entry_type": "clam", + "project": "mussel", + "location": "winkle", + "entry_type": "nautilus", } path = CatalogServiceClient.entry_type_path(**expected) @@ -23851,10 +26180,36 @@ def test_parse_entry_type_path(): assert expected == actual +def test_glossary_path(): + project = "scallop" + location = "abalone" + glossary = "squid" + expected = "projects/{project}/locations/{location}/glossaries/{glossary}".format( + project=project, + location=location, + glossary=glossary, + ) + actual = CatalogServiceClient.glossary_path(project, location, glossary) + assert expected == actual + + +def test_parse_glossary_path(): + expected = { + "project": "clam", + "location": "whelk", + "glossary": "octopus", + } + path = CatalogServiceClient.glossary_path(**expected) + + # Check that the path construction is reversible. 
+ actual = CatalogServiceClient.parse_glossary_path(path) + assert expected == actual + + def test_metadata_job_path(): - project = "whelk" - location = "octopus" - metadataJob = "oyster" + project = "oyster" + location = "nudibranch" + metadataJob = "cuttlefish" expected = ( "projects/{project}/locations/{location}/metadataJobs/{metadataJob}".format( project=project, @@ -23868,9 +26223,9 @@ def test_metadata_job_path(): def test_parse_metadata_job_path(): expected = { - "project": "nudibranch", - "location": "cuttlefish", - "metadataJob": "mussel", + "project": "mussel", + "location": "winkle", + "metadataJob": "nautilus", } path = CatalogServiceClient.metadata_job_path(**expected) @@ -23880,7 +26235,7 @@ def test_parse_metadata_job_path(): def test_common_billing_account_path(): - billing_account = "winkle" + billing_account = "scallop" expected = "billingAccounts/{billing_account}".format( billing_account=billing_account, ) @@ -23890,7 +26245,7 @@ def test_common_billing_account_path(): def test_parse_common_billing_account_path(): expected = { - "billing_account": "nautilus", + "billing_account": "abalone", } path = CatalogServiceClient.common_billing_account_path(**expected) @@ -23900,7 +26255,7 @@ def test_parse_common_billing_account_path(): def test_common_folder_path(): - folder = "scallop" + folder = "squid" expected = "folders/{folder}".format( folder=folder, ) @@ -23910,7 +26265,7 @@ def test_common_folder_path(): def test_parse_common_folder_path(): expected = { - "folder": "abalone", + "folder": "clam", } path = CatalogServiceClient.common_folder_path(**expected) @@ -23920,7 +26275,7 @@ def test_parse_common_folder_path(): def test_common_organization_path(): - organization = "squid" + organization = "whelk" expected = "organizations/{organization}".format( organization=organization, ) @@ -23930,7 +26285,7 @@ def test_common_organization_path(): def test_parse_common_organization_path(): expected = { - "organization": "clam", + "organization": 
"octopus", } path = CatalogServiceClient.common_organization_path(**expected) @@ -23940,7 +26295,7 @@ def test_parse_common_organization_path(): def test_common_project_path(): - project = "whelk" + project = "oyster" expected = "projects/{project}".format( project=project, ) @@ -23950,7 +26305,7 @@ def test_common_project_path(): def test_parse_common_project_path(): expected = { - "project": "octopus", + "project": "nudibranch", } path = CatalogServiceClient.common_project_path(**expected) @@ -23960,8 +26315,8 @@ def test_parse_common_project_path(): def test_common_location_path(): - project = "oyster" - location = "nudibranch" + project = "cuttlefish" + location = "mussel" expected = "projects/{project}/locations/{location}".format( project=project, location=location, @@ -23972,8 +26327,8 @@ def test_common_location_path(): def test_parse_common_location_path(): expected = { - "project": "cuttlefish", - "location": "mussel", + "project": "winkle", + "location": "nautilus", } path = CatalogServiceClient.common_location_path(**expected) diff --git a/packages/google-cloud-dataplex/tests/unit/gapic/dataplex_v1/test_data_scan_service.py b/packages/google-cloud-dataplex/tests/unit/gapic/dataplex_v1/test_data_scan_service.py index eb5638b911e3..ab973c7dc066 100644 --- a/packages/google-cloud-dataplex/tests/unit/gapic/dataplex_v1/test_data_scan_service.py +++ b/packages/google-cloud-dataplex/tests/unit/gapic/dataplex_v1/test_data_scan_service.py @@ -80,6 +80,7 @@ data_profile, data_quality, datascans, + datascans_common, processing, resources, service, @@ -7119,6 +7120,7 @@ def test_create_data_scan_rest_call_success(request_type): "job_end_trigger": {}, }, }, + "catalog_publishing_enabled": True, }, "data_profile_spec": { "sampling_percent": 0.17070000000000002, @@ -7136,6 +7138,7 @@ def test_create_data_scan_rest_call_success(request_type): "table_type": 1, "connection": "connection_value", "location": "location_value", + "project": "project_value", }, 
"storage_config": { "include_patterns": [ @@ -7165,7 +7168,14 @@ def test_create_data_scan_rest_call_success(request_type): "dimensions": [ {"dimension": {"name": "name_value"}, "passed": True, "score": 0.54} ], - "columns": [{"column": "column_value", "score": 0.54}], + "columns": [ + { + "column": "column_value", + "score": 0.54, + "passed": True, + "dimensions": {}, + } + ], "rules": [ { "rule": {}, @@ -7189,6 +7199,7 @@ def test_create_data_scan_rest_call_success(request_type): "post_scan_actions_result": { "bigquery_export_result": {"state": 1, "message": "message_value"} }, + "catalog_publishing_status": {"state": 1}, }, "data_profile_result": { "row_count": 992, @@ -7511,6 +7522,7 @@ def test_update_data_scan_rest_call_success(request_type): "job_end_trigger": {}, }, }, + "catalog_publishing_enabled": True, }, "data_profile_spec": { "sampling_percent": 0.17070000000000002, @@ -7528,6 +7540,7 @@ def test_update_data_scan_rest_call_success(request_type): "table_type": 1, "connection": "connection_value", "location": "location_value", + "project": "project_value", }, "storage_config": { "include_patterns": [ @@ -7557,7 +7570,14 @@ def test_update_data_scan_rest_call_success(request_type): "dimensions": [ {"dimension": {"name": "name_value"}, "passed": True, "score": 0.54} ], - "columns": [{"column": "column_value", "score": 0.54}], + "columns": [ + { + "column": "column_value", + "score": 0.54, + "passed": True, + "dimensions": {}, + } + ], "rules": [ { "rule": {}, @@ -7581,6 +7601,7 @@ def test_update_data_scan_rest_call_success(request_type): "post_scan_actions_result": { "bigquery_export_result": {"state": 1, "message": "message_value"} }, + "catalog_publishing_status": {"state": 1}, }, "data_profile_result": { "row_count": 992, diff --git a/scripts/client-post-processing/doc-formatting.yaml b/scripts/client-post-processing/doc-formatting.yaml index 89322bf519dc..5d8c7a975ca8 100644 --- a/scripts/client-post-processing/doc-formatting.yaml +++ 
b/scripts/client-post-processing/doc-formatting.yaml @@ -548,3 +548,17 @@ replacements: Set empty values to clear the metadata. Refer to documentation in count: 1 + - paths: [ + packages/google-cloud-dataplex/google/cloud/dataplex_v1/types/business_glossary.py, + ] + before: | + Examples of using a filter are: + \ -------------------------------\n + \ ``immediate_parent="projects/\{project_id_or_number\}/locations/\{location_id\}/glossaries/\{glossary_id\}"`` + \ -------------------------------------------------------------------------------------------------------\n + \ ``immediate_parent="projects/\{project_id_or_number\}/locations/\{location_id\}/glossaries/\{glossary_id\}/categories/\{category_id\}"`` + after: | + Examples of using a filter are:\n + - ``immediate_parent="projects/{project_id_or_number}/locations/{location_id}/glossaries/{glossary_id}"`` + - ``immediate_parent="projects/{project_id_or_number}/locations/{location_id}/glossaries/{glossary_id}/categories/{category_id}"`` + count: 2